symbian-qemu-0.9.1-12/python-2.6.1/Lib/test/test_generators.py
changeset 1 2fb8b9db1c86
       
     1 tutorial_tests = """
       
     2 Let's try a simple generator:
       
     3 
       
     4     >>> def f():
       
     5     ...    yield 1
       
     6     ...    yield 2
       
     7 
       
     8     >>> for i in f():
       
     9     ...     print i
       
    10     1
       
    11     2
       
    12     >>> g = f()
       
    13     >>> g.next()
       
    14     1
       
    15     >>> g.next()
       
    16     2
       
    17 
       
    18 "Falling off the end" stops the generator:
       
    19 
       
    20     >>> g.next()
       
    21     Traceback (most recent call last):
       
    22       File "<stdin>", line 1, in ?
       
    23       File "<stdin>", line 2, in g
       
    24     StopIteration
       
    25 
       
    26 "return" also stops the generator:
       
    27 
       
    28     >>> def f():
       
    29     ...     yield 1
       
    30     ...     return
       
    31     ...     yield 2 # never reached
       
    32     ...
       
    33     >>> g = f()
       
    34     >>> g.next()
       
    35     1
       
    36     >>> g.next()
       
    37     Traceback (most recent call last):
       
    38       File "<stdin>", line 1, in ?
       
    39       File "<stdin>", line 3, in f
       
    40     StopIteration
       
    41     >>> g.next() # once stopped, can't be resumed
       
    42     Traceback (most recent call last):
       
    43       File "<stdin>", line 1, in ?
       
    44     StopIteration
       
    45 
       
    46 "raise StopIteration" stops the generator too:
       
    47 
       
    48     >>> def f():
       
    49     ...     yield 1
       
    50     ...     raise StopIteration
       
    51     ...     yield 2 # never reached
       
    52     ...
       
    53     >>> g = f()
       
    54     >>> g.next()
       
    55     1
       
    56     >>> g.next()
       
    57     Traceback (most recent call last):
       
    58       File "<stdin>", line 1, in ?
       
    59     StopIteration
       
    60     >>> g.next()
       
    61     Traceback (most recent call last):
       
    62       File "<stdin>", line 1, in ?
       
    63     StopIteration
       
    64 
       
    65 However, they are not exactly equivalent:
       
    66 
       
    67     >>> def g1():
       
    68     ...     try:
       
    69     ...         return
       
    70     ...     except:
       
    71     ...         yield 1
       
    72     ...
       
    73     >>> list(g1())
       
    74     []
       
    75 
       
    76     >>> def g2():
       
    77     ...     try:
       
    78     ...         raise StopIteration
       
    79     ...     except:
       
    80     ...         yield 42
       
    81     >>> print list(g2())
       
    82     [42]
       
    83 
       
    84 This may be surprising at first:
       
    85 
       
    86     >>> def g3():
       
    87     ...     try:
       
    88     ...         return
       
    89     ...     finally:
       
    90     ...         yield 1
       
    91     ...
       
    92     >>> list(g3())
       
    93     [1]
       
    94 
       
    95 Let's create an alternate range() function implemented as a generator:
       
    96 
       
    97     >>> def yrange(n):
       
    98     ...     for i in range(n):
       
    99     ...         yield i
       
   100     ...
       
   101     >>> list(yrange(5))
       
   102     [0, 1, 2, 3, 4]
       
   103 
       
   104 Generators always return to the most recent caller:
       
   105 
       
   106     >>> def creator():
       
   107     ...     r = yrange(5)
       
   108     ...     print "creator", r.next()
       
   109     ...     return r
       
   110     ...
       
   111     >>> def caller():
       
   112     ...     r = creator()
       
   113     ...     for i in r:
       
   114     ...             print "caller", i
       
   115     ...
       
   116     >>> caller()
       
   117     creator 0
       
   118     caller 1
       
   119     caller 2
       
   120     caller 3
       
   121     caller 4
       
   122 
       
   123 Generators can call other generators:
       
   124 
       
   125     >>> def zrange(n):
       
   126     ...     for i in yrange(n):
       
   127     ...         yield i
       
   128     ...
       
   129     >>> list(zrange(5))
       
   130     [0, 1, 2, 3, 4]
       
   131 
       
   132 """
       
   133 
       
   134 # The examples from PEP 255.
       
   135 
       
   136 pep_tests = """
       
   137 
       
   138 Specification:  Yield
       
   139 
       
   140     Restriction:  A generator cannot be resumed while it is actively
       
   141     running:
       
   142 
       
   143     >>> def g():
       
   144     ...     i = me.next()
       
   145     ...     yield i
       
   146     >>> me = g()
       
   147     >>> me.next()
       
   148     Traceback (most recent call last):
       
   149      ...
       
   150       File "<string>", line 2, in g
       
   151     ValueError: generator already executing
       
   152 
       
   153 Specification: Return
       
   154 
       
   155     Note that return isn't always equivalent to raising StopIteration:  the
       
   156     difference lies in how enclosing try/except constructs are treated.
       
   157     For example,
       
   158 
       
   159         >>> def f1():
       
   160         ...     try:
       
   161         ...         return
       
   162         ...     except:
       
   163         ...        yield 1
       
   164         >>> print list(f1())
       
   165         []
       
   166 
       
   167     because, as in any function, return simply exits, but
       
   168 
       
   169         >>> def f2():
       
   170         ...     try:
       
   171         ...         raise StopIteration
       
   172         ...     except:
       
   173         ...         yield 42
       
   174         >>> print list(f2())
       
   175         [42]
       
   176 
       
   177     because StopIteration is captured by a bare "except", as is any
       
   178     exception.
       
   179 
       
   180 Specification: Generators and Exception Propagation
       
   181 
       
   182     >>> def f():
       
   183     ...     return 1//0
       
   184     >>> def g():
       
   185     ...     yield f()  # the zero division exception propagates
       
   186     ...     yield 42   # and we'll never get here
       
   187     >>> k = g()
       
   188     >>> k.next()
       
   189     Traceback (most recent call last):
       
   190       File "<stdin>", line 1, in ?
       
   191       File "<stdin>", line 2, in g
       
   192       File "<stdin>", line 2, in f
       
   193     ZeroDivisionError: integer division or modulo by zero
       
   194     >>> k.next()  # and the generator cannot be resumed
       
   195     Traceback (most recent call last):
       
   196       File "<stdin>", line 1, in ?
       
   197     StopIteration
       
   198     >>>
       
   199 
       
   200 Specification: Try/Except/Finally
       
   201 
       
   202     >>> def f():
       
   203     ...     try:
       
   204     ...         yield 1
       
   205     ...         try:
       
   206     ...             yield 2
       
   207     ...             1//0
       
   208     ...             yield 3  # never get here
       
   209     ...         except ZeroDivisionError:
       
   210     ...             yield 4
       
   211     ...             yield 5
       
   212     ...             raise
       
   213     ...         except:
       
   214     ...             yield 6
       
   215     ...         yield 7     # the "raise" above stops this
       
   216     ...     except:
       
   217     ...         yield 8
       
   218     ...     yield 9
       
   219     ...     try:
       
   220     ...         x = 12
       
   221     ...     finally:
       
   222     ...         yield 10
       
   223     ...     yield 11
       
   224     >>> print list(f())
       
   225     [1, 2, 4, 5, 8, 9, 10, 11]
       
   226     >>>
       
   227 
       
   228 Guido's binary tree example.
       
   229 
       
   230     >>> # A binary tree class.
       
   231     >>> class Tree:
       
   232     ...
       
   233     ...     def __init__(self, label, left=None, right=None):
       
   234     ...         self.label = label
       
   235     ...         self.left = left
       
   236     ...         self.right = right
       
   237     ...
       
   238     ...     def __repr__(self, level=0, indent="    "):
       
   239     ...         s = level*indent + repr(self.label)
       
   240     ...         if self.left:
       
   241     ...             s = s + "\\n" + self.left.__repr__(level+1, indent)
       
   242     ...         if self.right:
       
   243     ...             s = s + "\\n" + self.right.__repr__(level+1, indent)
       
   244     ...         return s
       
   245     ...
       
   246     ...     def __iter__(self):
       
   247     ...         return inorder(self)
       
   248 
       
   249     >>> # Create a Tree from a list.
       
   250     >>> def tree(list):
       
   251     ...     n = len(list)
       
   252     ...     if n == 0:
       
   253     ...         return []
       
   254     ...     i = n // 2
       
   255     ...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))
       
   256 
       
   257     >>> # Show it off: create a tree.
       
   258     >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
       
   259 
       
   260     >>> # A recursive generator that generates Tree labels in in-order.
       
   261     >>> def inorder(t):
       
   262     ...     if t:
       
   263     ...         for x in inorder(t.left):
       
   264     ...             yield x
       
   265     ...         yield t.label
       
   266     ...         for x in inorder(t.right):
       
   267     ...             yield x
       
   268 
       
   269     >>> # Show it off: create a tree.
       
   270     >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
       
   271     >>> # Print the nodes of the tree in in-order.
       
   272     >>> for x in t:
       
   273     ...     print x,
       
   274     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
       
   275 
       
   276     >>> # A non-recursive generator.
       
   277     >>> def inorder(node):
       
   278     ...     stack = []
       
   279     ...     while node:
       
   280     ...         while node.left:
       
   281     ...             stack.append(node)
       
   282     ...             node = node.left
       
   283     ...         yield node.label
       
   284     ...         while not node.right:
       
   285     ...             try:
       
   286     ...                 node = stack.pop()
       
   287     ...             except IndexError:
       
   288     ...                 return
       
   289     ...             yield node.label
       
   290     ...         node = node.right
       
   291 
       
   292     >>> # Exercise the non-recursive generator.
       
   293     >>> for x in t:
       
   294     ...     print x,
       
   295     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
       
   296 
       
   297 """
       
   298 
       
   299 # Examples from Iterator-List and Python-Dev and c.l.py.
       
   300 
       
   301 email_tests = """
       
   302 
       
   303 The difference between yielding None and returning it.
       
   304 
       
   305 >>> def g():
       
   306 ...     for i in range(3):
       
   307 ...         yield None
       
   308 ...     yield None
       
   309 ...     return
       
   310 >>> list(g())
       
   311 [None, None, None, None]
       
   312 
       
   313 Ensure that explicitly raising StopIteration acts like any other exception
       
   314 in try/except, not like a return.
       
   315 
       
   316 >>> def g():
       
   317 ...     yield 1
       
   318 ...     try:
       
   319 ...         raise StopIteration
       
   320 ...     except:
       
   321 ...         yield 2
       
   322 ...     yield 3
       
   323 >>> list(g())
       
   324 [1, 2, 3]
       
   325 
       
   326 Next one was posted to c.l.py.
       
   327 
       
   328 >>> def gcomb(x, k):
       
   329 ...     "Generate all combinations of k elements from list x."
       
   330 ...
       
   331 ...     if k > len(x):
       
   332 ...         return
       
   333 ...     if k == 0:
       
   334 ...         yield []
       
   335 ...     else:
       
   336 ...         first, rest = x[0], x[1:]
       
   337 ...         # A combination does or doesn't contain first.
       
   338 ...         # If it does, the remainder is a k-1 comb of rest.
       
   339 ...         for c in gcomb(rest, k-1):
       
   340 ...             c.insert(0, first)
       
   341 ...             yield c
       
   342 ...         # If it doesn't contain first, it's a k comb of rest.
       
   343 ...         for c in gcomb(rest, k):
       
   344 ...             yield c
       
   345 
       
   346 >>> seq = range(1, 5)
       
   347 >>> for k in range(len(seq) + 2):
       
   348 ...     print "%d-combs of %s:" % (k, seq)
       
   349 ...     for c in gcomb(seq, k):
       
   350 ...         print "   ", c
       
   351 0-combs of [1, 2, 3, 4]:
       
   352     []
       
   353 1-combs of [1, 2, 3, 4]:
       
   354     [1]
       
   355     [2]
       
   356     [3]
       
   357     [4]
       
   358 2-combs of [1, 2, 3, 4]:
       
   359     [1, 2]
       
   360     [1, 3]
       
   361     [1, 4]
       
   362     [2, 3]
       
   363     [2, 4]
       
   364     [3, 4]
       
   365 3-combs of [1, 2, 3, 4]:
       
   366     [1, 2, 3]
       
   367     [1, 2, 4]
       
   368     [1, 3, 4]
       
   369     [2, 3, 4]
       
   370 4-combs of [1, 2, 3, 4]:
       
   371     [1, 2, 3, 4]
       
   372 5-combs of [1, 2, 3, 4]:
       
   373 
       
   374 From the Iterators list, about the types of these things.
       
   375 
       
   376 >>> def g():
       
   377 ...     yield 1
       
   378 ...
       
   379 >>> type(g)
       
   380 <type 'function'>
       
   381 >>> i = g()
       
   382 >>> type(i)
       
   383 <type 'generator'>
       
   384 >>> [s for s in dir(i) if not s.startswith('_')]
       
   385 ['close', 'gi_code', 'gi_frame', 'gi_running', 'next', 'send', 'throw']
       
   386 >>> print i.next.__doc__
       
   387 x.next() -> the next value, or raise StopIteration
       
   388 >>> iter(i) is i
       
   389 True
       
   390 >>> import types
       
   391 >>> isinstance(i, types.GeneratorType)
       
   392 True
       
   393 
       
   394 And more, added later.
       
   395 
       
   396 >>> i.gi_running
       
   397 0
       
   398 >>> type(i.gi_frame)
       
   399 <type 'frame'>
       
   400 >>> i.gi_running = 42
       
   401 Traceback (most recent call last):
       
   402   ...
       
   403 TypeError: readonly attribute
       
   404 >>> def g():
       
   405 ...     yield me.gi_running
       
   406 >>> me = g()
       
   407 >>> me.gi_running
       
   408 0
       
   409 >>> me.next()
       
   410 1
       
   411 >>> me.gi_running
       
   412 0
       
   413 
       
   414 A clever union-find implementation from c.l.py, due to David Eppstein.
       
   415 Sent: Friday, June 29, 2001 12:16 PM
       
   416 To: python-list@python.org
       
   417 Subject: Re: PEP 255: Simple Generators
       
   418 
       
   419 >>> class disjointSet:
       
   420 ...     def __init__(self, name):
       
   421 ...         self.name = name
       
   422 ...         self.parent = None
       
   423 ...         self.generator = self.generate()
       
   424 ...
       
   425 ...     def generate(self):
       
   426 ...         while not self.parent:
       
   427 ...             yield self
       
   428 ...         for x in self.parent.generator:
       
   429 ...             yield x
       
   430 ...
       
   431 ...     def find(self):
       
   432 ...         return self.generator.next()
       
   433 ...
       
   434 ...     def union(self, parent):
       
   435 ...         if self.parent:
       
   436 ...             raise ValueError("Sorry, I'm not a root!")
       
   437 ...         self.parent = parent
       
   438 ...
       
   439 ...     def __str__(self):
       
   440 ...         return self.name
       
   441 
       
   442 >>> names = "ABCDEFGHIJKLM"
       
   443 >>> sets = [disjointSet(name) for name in names]
       
   444 >>> roots = sets[:]
       
   445 
       
   446 >>> import random
       
   447 >>> gen = random.WichmannHill(42)
       
   448 >>> while 1:
       
   449 ...     for s in sets:
       
   450 ...         print "%s->%s" % (s, s.find()),
       
   451 ...     print
       
   452 ...     if len(roots) > 1:
       
   453 ...         s1 = gen.choice(roots)
       
   454 ...         roots.remove(s1)
       
   455 ...         s2 = gen.choice(roots)
       
   456 ...         s1.union(s2)
       
   457 ...         print "merged", s1, "into", s2
       
   458 ...     else:
       
   459 ...         break
       
   460 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
       
   461 merged D into G
       
   462 A->A B->B C->C D->G E->E F->F G->G H->H I->I J->J K->K L->L M->M
       
   463 merged C into F
       
   464 A->A B->B C->F D->G E->E F->F G->G H->H I->I J->J K->K L->L M->M
       
   465 merged L into A
       
   466 A->A B->B C->F D->G E->E F->F G->G H->H I->I J->J K->K L->A M->M
       
   467 merged H into E
       
   468 A->A B->B C->F D->G E->E F->F G->G H->E I->I J->J K->K L->A M->M
       
   469 merged B into E
       
   470 A->A B->E C->F D->G E->E F->F G->G H->E I->I J->J K->K L->A M->M
       
   471 merged J into G
       
   472 A->A B->E C->F D->G E->E F->F G->G H->E I->I J->G K->K L->A M->M
       
   473 merged E into G
       
   474 A->A B->G C->F D->G E->G F->F G->G H->G I->I J->G K->K L->A M->M
       
   475 merged M into G
       
   476 A->A B->G C->F D->G E->G F->F G->G H->G I->I J->G K->K L->A M->G
       
   477 merged I into K
       
   478 A->A B->G C->F D->G E->G F->F G->G H->G I->K J->G K->K L->A M->G
       
   479 merged K into A
       
   480 A->A B->G C->F D->G E->G F->F G->G H->G I->A J->G K->A L->A M->G
       
   481 merged F into A
       
   482 A->A B->G C->A D->G E->G F->A G->G H->G I->A J->G K->A L->A M->G
       
   483 merged A into G
       
   484 A->G B->G C->G D->G E->G F->G G->G H->G I->G J->G K->G L->G M->G
       
   485 
       
   486 """
       
   487 # Emacs turd '
       
   488 
       
   489 # Fun tests (for sufficiently warped notions of "fun").
       
   490 
       
   491 fun_tests = """
       
   492 
       
   493 Build up to a recursive Sieve of Eratosthenes generator.
       
   494 
       
   495 >>> def firstn(g, n):
       
   496 ...     return [g.next() for i in range(n)]
       
   497 
       
   498 >>> def intsfrom(i):
       
   499 ...     while 1:
       
   500 ...         yield i
       
   501 ...         i += 1
       
   502 
       
   503 >>> firstn(intsfrom(5), 7)
       
   504 [5, 6, 7, 8, 9, 10, 11]
       
   505 
       
   506 >>> def exclude_multiples(n, ints):
       
   507 ...     for i in ints:
       
   508 ...         if i % n:
       
   509 ...             yield i
       
   510 
       
   511 >>> firstn(exclude_multiples(3, intsfrom(1)), 6)
       
   512 [1, 2, 4, 5, 7, 8]
       
   513 
       
   514 >>> def sieve(ints):
       
   515 ...     prime = ints.next()
       
   516 ...     yield prime
       
   517 ...     not_divisible_by_prime = exclude_multiples(prime, ints)
       
   518 ...     for p in sieve(not_divisible_by_prime):
       
   519 ...         yield p
       
   520 
       
   521 >>> primes = sieve(intsfrom(2))
       
   522 >>> firstn(primes, 20)
       
   523 [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
       
   524 
       
   525 
       
   526 Another famous problem:  generate all integers of the form
       
   527     2**i * 3**j  * 5**k
       
   528 in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
       
   529 Try writing it without generators, and correctly, and without generating
       
   530 3 internal results for each result output.
       
   531 
       
   532 >>> def times(n, g):
       
   533 ...     for i in g:
       
   534 ...         yield n * i
       
   535 >>> firstn(times(10, intsfrom(1)), 10)
       
   536 [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
       
   537 
       
   538 >>> def merge(g, h):
       
   539 ...     ng = g.next()
       
   540 ...     nh = h.next()
       
   541 ...     while 1:
       
   542 ...         if ng < nh:
       
   543 ...             yield ng
       
   544 ...             ng = g.next()
       
   545 ...         elif ng > nh:
       
   546 ...             yield nh
       
   547 ...             nh = h.next()
       
   548 ...         else:
       
   549 ...             yield ng
       
   550 ...             ng = g.next()
       
   551 ...             nh = h.next()
       
   552 
       
   553 The following works, but is doing a whale of a lot of redundant work --
       
   554 it's not clear how to get the internal uses of m235 to share a single
       
   555 generator.  Note that me_times2 (etc) each need to see every element in the
       
   556 result sequence.  So this is an example where lazy lists are more natural
       
   557 (you can look at the head of a lazy list any number of times).
       
   558 
       
   559 >>> def m235():
       
   560 ...     yield 1
       
   561 ...     me_times2 = times(2, m235())
       
   562 ...     me_times3 = times(3, m235())
       
   563 ...     me_times5 = times(5, m235())
       
   564 ...     for i in merge(merge(me_times2,
       
   565 ...                          me_times3),
       
   566 ...                    me_times5):
       
   567 ...         yield i
       
   568 
       
   569 Don't print "too many" of these -- the implementation above is extremely
       
   570 inefficient:  each call of m235() leads to 3 recursive calls, and in
       
   571 turn each of those 3 more, and so on, and so on, until we've descended
       
   572 enough levels to satisfy the print stmts.  Very odd:  when I printed 5
       
   573 lines of results below, this managed to screw up Win98's malloc in "the
       
   574 usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
       
   575 address space, and it *looked* like a very slow leak.
       
   576 
       
   577 >>> result = m235()
       
   578 >>> for i in range(3):
       
   579 ...     print firstn(result, 15)
       
   580 [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
       
   581 [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
       
   582 [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
       
   583 
       
   584 Heh.  Here's one way to get a shared list, complete with an excruciating
       
   585 namespace renaming trick.  The *pretty* part is that the times() and merge()
       
   586 functions can be reused as-is, because they only assume their stream
       
   587 arguments are iterable -- a LazyList is the same as a generator to times().
       
   588 
       
   589 >>> class LazyList:
       
   590 ...     def __init__(self, g):
       
   591 ...         self.sofar = []
       
   592 ...         self.fetch = g.next
       
   593 ...
       
   594 ...     def __getitem__(self, i):
       
   595 ...         sofar, fetch = self.sofar, self.fetch
       
   596 ...         while i >= len(sofar):
       
   597 ...             sofar.append(fetch())
       
   598 ...         return sofar[i]
       
   599 
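        A tiny illustrative check (not part of the original tests) that a
        LazyList pulls from its source only on demand and memoizes everything
        it has fetched; lazy_squares is an arbitrary name, and intsfrom() is
        reused from above:

        >>> lazy_squares = LazyList(i*i for i in intsfrom(0))
        >>> lazy_squares[3]
        9
        >>> lazy_squares.sofar
        [0, 1, 4, 9]
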
       
   600 >>> def m235():
       
   601 ...     yield 1
       
   602 ...     # Gack:  m235 below actually refers to a LazyList.
       
   603 ...     me_times2 = times(2, m235)
       
   604 ...     me_times3 = times(3, m235)
       
   605 ...     me_times5 = times(5, m235)
       
   606 ...     for i in merge(merge(me_times2,
       
   607 ...                          me_times3),
       
   608 ...                    me_times5):
       
   609 ...         yield i
       
   610 
       
   611 Print as many of these as you like -- *this* implementation is memory-
       
   612 efficient.
       
   613 
       
   614 >>> m235 = LazyList(m235())
       
   615 >>> for i in range(5):
       
   616 ...     print [m235[j] for j in range(15*i, 15*(i+1))]
       
   617 [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
       
   618 [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
       
   619 [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
       
   620 [200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
       
   621 [400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
       
   622 
       
   623 Ye olde Fibonacci generator, LazyList style.
       
   624 
       
   625 >>> def fibgen(a, b):
       
   626 ...
       
   627 ...     def sum(g, h):
       
   628 ...         while 1:
       
   629 ...             yield g.next() + h.next()
       
   630 ...
       
   631 ...     def tail(g):
       
   632 ...         g.next()    # throw first away
       
   633 ...         for x in g:
       
   634 ...             yield x
       
   635 ...
       
   636 ...     yield a
       
   637 ...     yield b
       
   638 ...     for s in sum(iter(fib),
       
   639 ...                  tail(iter(fib))):
       
   640 ...         yield s
       
   641 
       
   642 >>> fib = LazyList(fibgen(1, 2))
       
   643 >>> firstn(iter(fib), 17)
       
   644 [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
       
   645 
       
   646 
       
   647 Running after your tail with itertools.tee (new in version 2.4)
       
   648 
       
   649 The algorithms "m235" (Hamming) and Fibonacci presented above are both
       
   650 examples of a whole family of FP (functional programming) algorithms
       
   651 where a function produces and returns a list while the production algorithm
       
    652 assumes the list has already been produced by recursively calling itself.
       
   653 For these algorithms to work, they must:
       
   654 
       
   655 - produce at least a first element without presupposing the existence of
       
   656   the rest of the list
       
   657 - produce their elements in a lazy manner
       
   658 
       
   659 To work efficiently, the beginning of the list must not be recomputed over
       
   660 and over again. This is ensured in most FP languages as a built-in feature.
       
    661 In Python, we have to explicitly maintain a list of already computed results
       
    662 and abandon genuine recursion.
       
   663 
       
   664 This is what had been attempted above with the LazyList class. One problem
       
   665 with that class is that it keeps a list of all of the generated results and
       
   666 therefore continually grows. This partially defeats the goal of the generator
       
   667 concept, viz. produce the results only as needed instead of producing them
       
   668 all and thereby wasting memory.
       
   669 
       
   670 Thanks to itertools.tee, it is now clear "how to get the internal uses of
       
   671 m235 to share a single generator".
       
   672 
       
   673 >>> from itertools import tee
       
   674 >>> def m235():
       
   675 ...     def _m235():
       
   676 ...         yield 1
       
   677 ...         for n in merge(times(2, m2),
       
   678 ...                        merge(times(3, m3),
       
   679 ...                              times(5, m5))):
       
   680 ...             yield n
       
   681 ...     m1 = _m235()
       
   682 ...     m2, m3, m5, mRes = tee(m1, 4)
       
   683 ...     return mRes
       
   684 
       
   685 >>> it = m235()
       
   686 >>> for i in range(5):
       
   687 ...     print firstn(it, 15)
       
   688 [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
       
   689 [25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
       
   690 [81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
       
   691 [200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
       
   692 [400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
       
   693 
       
   694 The "tee" function does just what we want. It internally keeps a generated
       
   695 result for as long as it has not been "consumed" from all of the duplicated
       
    696 iterators, whereupon it is deleted. You can therefore print the Hamming
       
    697 sequence for hours with little or no increase in memory usage.
       
   698 
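        A minimal sketch of that buffering behaviour (illustrative only, not
        part of the original test; a and b are arbitrary names):

        >>> from itertools import tee
        >>> a, b = tee(iter([1, 2, 3]))
        >>> a.next(), a.next()   # a runs ahead; tee buffers its output for b
        (1, 2)
        >>> b.next()             # b still sees the stream from its beginning
        1
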
       
   699 The beauty of it is that recursive running-after-their-tail FP algorithms
       
   700 are quite straightforwardly expressed with this Python idiom.
       
   701 
       
   702 Ye olde Fibonacci generator, tee style.
       
   703 
       
   704 >>> def fib():
       
   705 ...
       
   706 ...     def _isum(g, h):
       
   707 ...         while 1:
       
   708 ...             yield g.next() + h.next()
       
   709 ...
       
   710 ...     def _fib():
       
   711 ...         yield 1
       
   712 ...         yield 2
       
   713 ...         fibTail.next() # throw first away
       
   714 ...         for res in _isum(fibHead, fibTail):
       
   715 ...             yield res
       
   716 ...
       
   717 ...     realfib = _fib()
       
   718 ...     fibHead, fibTail, fibRes = tee(realfib, 3)
       
   719 ...     return fibRes
       
   720 
       
   721 >>> firstn(fib(), 17)
       
   722 [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
       
   723 
       
   724 """
       
   725 
       
   726 # syntax_tests mostly provokes SyntaxErrors.  Also fiddling with #if 0
       
   727 # hackery.
       
   728 
       
   729 syntax_tests = """
       
   730 
       
   731 >>> def f():
       
   732 ...     return 22
       
   733 ...     yield 1
       
   734 Traceback (most recent call last):
       
   735   ..
       
   736 SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[0]>, line 3)
       
   737 
       
   738 >>> def f():
       
   739 ...     yield 1
       
   740 ...     return 22
       
   741 Traceback (most recent call last):
       
   742   ..
       
   743 SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[1]>, line 3)
       
   744 
       
   745 "return None" is not the same as "return" in a generator:
       
   746 
       
   747 >>> def f():
       
   748 ...     yield 1
       
   749 ...     return None
       
   750 Traceback (most recent call last):
       
   751   ..
       
   752 SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[2]>, line 3)
       
   753 
       
   754 These are fine:
       
   755 
       
   756 >>> def f():
       
   757 ...     yield 1
       
   758 ...     return
       
   759 
       
   760 >>> def f():
       
   761 ...     try:
       
   762 ...         yield 1
       
   763 ...     finally:
       
   764 ...         pass
       
   765 
       
   766 >>> def f():
       
   767 ...     try:
       
   768 ...         try:
       
   769 ...             1//0
       
   770 ...         except ZeroDivisionError:
       
   771 ...             yield 666
       
   772 ...         except:
       
   773 ...             pass
       
   774 ...     finally:
       
   775 ...         pass
       
   776 
       
   777 >>> def f():
       
   778 ...     try:
       
   779 ...         try:
       
   780 ...             yield 12
       
   781 ...             1//0
       
   782 ...         except ZeroDivisionError:
       
   783 ...             yield 666
       
   784 ...         except:
       
   785 ...             try:
       
   786 ...                 x = 12
       
   787 ...             finally:
       
   788 ...                 yield 12
       
   789 ...     except:
       
   790 ...         return
       
   791 >>> list(f())
       
   792 [12, 666]
       
   793 
       
   794 >>> def f():
       
   795 ...    yield
       
   796 >>> type(f())
       
   797 <type 'generator'>
       
   798 
       
   799 
       
   800 >>> def f():
       
   801 ...    if 0:
       
   802 ...        yield
       
   803 >>> type(f())
       
   804 <type 'generator'>
       
   805 
       
   806 
       
   807 >>> def f():
       
   808 ...     if 0:
       
   809 ...         yield 1
       
   810 >>> type(f())
       
   811 <type 'generator'>
       
   812 
       
   813 >>> def f():
       
   814 ...    if "":
       
   815 ...        yield None
       
   816 >>> type(f())
       
   817 <type 'generator'>
       
   818 
       
   819 >>> def f():
       
   820 ...     return
       
   821 ...     try:
       
   822 ...         if x==4:
       
   823 ...             pass
       
   824 ...         elif 0:
       
   825 ...             try:
       
   826 ...                 1//0
       
   827 ...             except SyntaxError:
       
   828 ...                 pass
       
   829 ...             else:
       
   830 ...                 if 0:
       
   831 ...                     while 12:
       
   832 ...                         x += 1
       
   833 ...                         yield 2 # don't blink
       
   834 ...                         f(a, b, c, d, e)
       
   835 ...         else:
       
   836 ...             pass
       
   837 ...     except:
       
   838 ...         x = 1
       
   839 ...     return
       
   840 >>> type(f())
       
   841 <type 'generator'>
       
   842 
       
   843 >>> def f():
       
   844 ...     if 0:
       
   845 ...         def g():
       
   846 ...             yield 1
       
   847 ...
       
   848 >>> type(f())
       
   849 <type 'NoneType'>
       
   850 
       
   851 >>> def f():
       
   852 ...     if 0:
       
   853 ...         class C:
       
   854 ...             def __init__(self):
       
   855 ...                 yield 1
       
   856 ...             def f(self):
       
   857 ...                 yield 2
       
   858 >>> type(f())
       
   859 <type 'NoneType'>
       
   860 
       
   861 >>> def f():
       
   862 ...     if 0:
       
   863 ...         return
       
   864 ...     if 0:
       
   865 ...         yield 2
       
   866 >>> type(f())
       
   867 <type 'generator'>
       
   868 
       
   869 
       
   870 >>> def f():
       
   871 ...     if 0:
       
   872 ...         lambda x:  x        # shouldn't trigger here
       
   873 ...         return              # or here
       
   874 ...         def f(i):
       
   875 ...             return 2*i      # or here
       
   876 ...         if 0:
       
   877 ...             return 3        # but *this* sucks (line 8)
       
   878 ...     if 0:
       
   879 ...         yield 2             # because it's a generator (line 10)
       
   880 Traceback (most recent call last):
       
   881 SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[24]>, line 10)
       
   882 
       
   883 This one caused a crash (see SF bug 567538):
       
   884 
       
   885 >>> def f():
       
   886 ...     for i in range(3):
       
   887 ...         try:
       
   888 ...             continue
       
   889 ...         finally:
       
   890 ...             yield i
       
   891 ...
       
   892 >>> g = f()
       
   893 >>> print g.next()
       
   894 0
       
   895 >>> print g.next()
       
   896 1
       
   897 >>> print g.next()
       
   898 2
       
   899 >>> print g.next()
       
   900 Traceback (most recent call last):
       
   901 StopIteration
       
   902 
       
   903 
       
   904 Test the gi_code attribute
       
   905 
       
   906 >>> def f():
       
   907 ...     yield 5
       
   908 ...
       
   909 >>> g = f()
       
   910 >>> g.gi_code is f.func_code
       
   911 True
       
   912 >>> g.next()
       
   913 5
       
   914 >>> g.next()
       
   915 Traceback (most recent call last):
       
   916 StopIteration
       
   917 >>> g.gi_code is f.func_code
       
   918 True
       
   919 
       
   920 
       
   921 Test the __name__ attribute and the repr()
       
   922 
       
   923 >>> def f():
       
   924 ...    yield 5
       
   925 ...
       
   926 >>> g = f()
       
   927 >>> g.__name__
       
   928 'f'
       
   929 >>> repr(g)  # doctest: +ELLIPSIS
       
   930 '<generator object f at ...>'
       
   931 """
       
   932 
       
   933 # conjoin is a simple backtracking generator, named in honor of Icon's
       
   934 # "conjunction" control structure.  Pass a list of no-argument functions
       
   935 # that return iterable objects.  Easiest to explain by example:  assume the
       
   936 # function list [x, y, z] is passed.  Then conjoin acts like:
       
   937 #
       
   938 # def g():
       
   939 #     values = [None] * 3
       
   940 #     for values[0] in x():
       
   941 #         for values[1] in y():
       
   942 #             for values[2] in z():
       
   943 #                 yield values
       
   944 #
       
   945 # So some 3-lists of values *may* be generated, each time we successfully
       
   946 # get into the innermost loop.  If an iterator fails (is exhausted) before
       
   947 # then, it "backtracks" to get the next value from the nearest enclosing
       
   948 # iterator (the one "to the left"), and starts all over again at the next
       
   949 # slot (pumps a fresh iterator).  Of course this is most useful when the
       
   950 # iterators have side-effects, so that which values *can* be generated at
       
   951 # each slot depend on the values iterated at previous slots.
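        # An illustrative use with side-effect-free iterator factories (not
        # part of the tests; conjoin_tests below exercises it for real):
        #
        #     for values in conjoin([lambda: iter("ab"), lambda: iter("xy")]):
        #         print values
        #
        # prints ['a', 'x'], ['a', 'y'], ['b', 'x'], ['b', 'y'], in that order.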
       
   952 
       
   953 def conjoin(gs):
       
   954 
       
   955     values = [None] * len(gs)
       
   956 
       
   957     def gen(i, values=values):
       
   958         if i >= len(gs):
       
   959             yield values
       
   960         else:
       
   961             for values[i] in gs[i]():
       
   962                 for x in gen(i+1):
       
   963                     yield x
       
   964 
       
   965     for x in gen(0):
       
   966         yield x
       
   967 
       
   968 # That works fine, but recursing a level and checking i against len(gs) for
       
   969 # each item produced is inefficient.  By doing manual loop unrolling across
       
   970 # generator boundaries, it's possible to eliminate most of that overhead.
       
   971 # This isn't worth the bother *in general* for generators, but conjoin() is
       
   972 # a core building block for some CPU-intensive generator applications.
       
   973 
       
   974 def conjoin(gs):
       
   975 
       
   976     n = len(gs)
       
   977     values = [None] * n
       
   978 
       
    979     # Do one loop nest at a time recursively, until the # of loop nests
       
   980     # remaining is divisible by 3.
       
   981 
       
   982     def gen(i, values=values):
       
   983         if i >= n:
       
   984             yield values
       
   985 
       
   986         elif (n-i) % 3:
       
   987             ip1 = i+1
       
   988             for values[i] in gs[i]():
       
   989                 for x in gen(ip1):
       
   990                     yield x
       
   991 
       
   992         else:
       
   993             for x in _gen3(i):
       
   994                 yield x
       
   995 
       
   996     # Do three loop nests at a time, recursing only if at least three more
       
   997     # remain.  Don't call directly:  this is an internal optimization for
       
   998     # gen's use.
       
   999 
       
  1000     def _gen3(i, values=values):
       
  1001         assert i < n and (n-i) % 3 == 0
       
  1002         ip1, ip2, ip3 = i+1, i+2, i+3
       
  1003         g, g1, g2 = gs[i : ip3]
       
  1004 
       
  1005         if ip3 >= n:
       
  1006             # These are the last three, so we can yield values directly.
       
  1007             for values[i] in g():
       
  1008                 for values[ip1] in g1():
       
  1009                     for values[ip2] in g2():
       
  1010                         yield values
       
  1011 
       
  1012         else:
       
  1013             # At least 6 loop nests remain; peel off 3 and recurse for the
       
  1014             # rest.
       
  1015             for values[i] in g():
       
  1016                 for values[ip1] in g1():
       
  1017                     for values[ip2] in g2():
       
  1018                         for x in _gen3(ip3):
       
  1019                             yield x
       
  1020 
       
  1021     for x in gen(0):
       
  1022         yield x
       
  1023 
       
  1024 # And one more approach:  For backtracking apps like the Knight's Tour
       
  1025 # solver below, the number of backtracking levels can be enormous (one
       
  1026 # level per square, for the Knight's Tour, so that e.g. a 100x100 board
       
  1027 # needs 10,000 levels).  In such cases Python is likely to run out of
       
  1028 # stack space due to recursion.  So here's a recursion-free version of
       
  1029 # conjoin too.
       
  1030 # NOTE WELL:  This allows large problems to be solved with only trivial
       
  1031 # demands on stack space.  Without explicitly resumable generators, this is
       
  1032 # much harder to achieve.  OTOH, this is much slower (up to a factor of 2)
       
  1033 # than the fancy unrolled recursive conjoin.
       
  1034 
       
  1035 def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
       
  1036     n = len(gs)
       
  1037     values = [None] * n
       
  1038     iters  = [None] * n
       
  1039     _StopIteration = StopIteration  # make local because caught a *lot*
       
  1040     i = 0
       
  1041     while 1:
       
  1042         # Descend.
       
  1043         try:
       
  1044             while i < n:
       
  1045                 it = iters[i] = gs[i]().next
       
  1046                 values[i] = it()
       
  1047                 i += 1
       
  1048         except _StopIteration:
       
  1049             pass
       
  1050         else:
       
  1051             assert i == n
       
  1052             yield values
       
  1053 
       
  1054         # Backtrack until an older iterator can be resumed.
       
  1055         i -= 1
       
  1056         while i >= 0:
       
  1057             try:
       
  1058                 values[i] = iters[i]()
       
  1059                 # Success!  Start fresh at next level.
       
  1060                 i += 1
       
  1061                 break
       
  1062             except _StopIteration:
       
  1063                 # Continue backtracking.
       
  1064                 i -= 1
       
  1065         else:
       
  1066             assert i < 0
       
  1067             break
       
  1068 
       
  1069 # A conjoin-based N-Queens solver.
       
  1070 
       
  1071 class Queens:
       
  1072     def __init__(self, n):
       
  1073         self.n = n
       
  1074         rangen = range(n)
       
  1075 
       
  1076         # Assign a unique int to each column and diagonal.
       
  1077         # columns:  n of those, range(n).
       
  1078         # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
       
  1079         # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
       
  1080         # based.
       
  1081         # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
       
  1082         # each, smallest i+j is 0, largest is 2n-2.
       
  1083 
       
  1084         # For each square, compute a bit vector of the columns and
       
  1085         # diagonals it covers, and for each row compute a function that
       
   1086         # generates the possibilities for the columns in that row.
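                # Worked example (illustrative numbers, not in the original
                # comment):  with n=4, the square at row i=1, column j=2 sets
                # bit j = 2 for its column, bit 4 + (1-2) + 3 = 6 for its
                # NW-SE diagonal, and bit 4 + 7 + (1+2) = 14 for its NE-SW
                # diagonal, so rowuses[2] == (1 << 2) | (1 << 6) | (1 << 14).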
       
  1087         self.rowgenerators = []
       
  1088         for i in rangen:
       
  1089             rowuses = [(1L << j) |                  # column ordinal
       
  1090                        (1L << (n + i-j + n-1)) |    # NW-SE ordinal
       
  1091                        (1L << (n + 2*n-1 + i+j))    # NE-SW ordinal
       
  1092                             for j in rangen]
       
  1093 
       
  1094             def rowgen(rowuses=rowuses):
       
  1095                 for j in rangen:
       
  1096                     uses = rowuses[j]
       
  1097                     if uses & self.used == 0:
       
  1098                         self.used |= uses
       
  1099                         yield j
       
  1100                         self.used &= ~uses
       
  1101 
       
  1102             self.rowgenerators.append(rowgen)
       
  1103 
       
  1104     # Generate solutions.
       
  1105     def solve(self):
       
  1106         self.used = 0
       
  1107         for row2col in conjoin(self.rowgenerators):
       
  1108             yield row2col
       
  1109 
       
  1110     def printsolution(self, row2col):
       
  1111         n = self.n
       
  1112         assert n == len(row2col)
       
  1113         sep = "+" + "-+" * n
       
  1114         print sep
       
  1115         for i in range(n):
       
  1116             squares = [" " for j in range(n)]
       
  1117             squares[row2col[i]] = "Q"
       
  1118             print "|" + "|".join(squares) + "|"
       
  1119             print sep
       
  1120 
       
  1121 # A conjoin-based Knight's Tour solver.  This is pretty sophisticated
       
  1122 # (e.g., when used with flat_conjoin above, and passing hard=1 to the
       
  1123 # constructor, a 200x200 Knight's Tour was found quickly -- note that we're
       
  1124 # creating 10s of thousands of generators then!), and is lengthy.
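        # An illustrative way to drive it (not part of this module's tests;
        # conjoin_tests below runs a 10x10 board):
        #
        #     k = Knights(6, 6)
        #     for tour in k.solve():
        #         k.printsolution(tour)
        #         break    # the full set of tours is enormous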
       
  1125 
       
  1126 class Knights:
       
  1127     def __init__(self, m, n, hard=0):
       
  1128         self.m, self.n = m, n
       
  1129 
       
  1130         # solve() will set up succs[i] to be a list of square #i's
       
  1131         # successors.
       
  1132         succs = self.succs = []
       
  1133 
       
  1134         # Remove i0 from each of its successor's successor lists, i.e.
       
  1135         # successors can't go back to i0 again.  Return 0 if we can
       
  1136         # detect this makes a solution impossible, else return 1.
       
  1137 
       
  1138         def remove_from_successors(i0, len=len):
       
  1139             # If we remove all exits from a free square, we're dead:
       
  1140             # even if we move to it next, we can't leave it again.
       
  1141             # If we create a square with one exit, we must visit it next;
       
  1142             # else somebody else will have to visit it, and since there's
       
  1143             # only one adjacent, there won't be a way to leave it again.
       
   1144             # Finally, if we create more than one free square with a
       
  1145             # single exit, we can only move to one of them next, leaving
       
  1146             # the other one a dead end.
       
  1147             ne0 = ne1 = 0
       
  1148             for i in succs[i0]:
       
  1149                 s = succs[i]
       
  1150                 s.remove(i0)
       
  1151                 e = len(s)
       
  1152                 if e == 0:
       
  1153                     ne0 += 1
       
  1154                 elif e == 1:
       
  1155                     ne1 += 1
       
  1156             return ne0 == 0 and ne1 < 2
       
  1157 
       
  1158         # Put i0 back in each of its successor's successor lists.
       
  1159 
       
  1160         def add_to_successors(i0):
       
  1161             for i in succs[i0]:
       
  1162                 succs[i].append(i0)
       
  1163 
       
  1164         # Generate the first move.
       
  1165         def first():
       
  1166             if m < 1 or n < 1:
       
  1167                 return
       
  1168 
       
  1169             # Since we're looking for a cycle, it doesn't matter where we
       
  1170             # start.  Starting in a corner makes the 2nd move easy.
       
  1171             corner = self.coords2index(0, 0)
       
  1172             remove_from_successors(corner)
       
  1173             self.lastij = corner
       
  1174             yield corner
       
  1175             add_to_successors(corner)
       
  1176 
       
  1177         # Generate the second moves.
       
  1178         def second():
       
  1179             corner = self.coords2index(0, 0)
       
  1180             assert self.lastij == corner  # i.e., we started in the corner
       
  1181             if m < 3 or n < 3:
       
  1182                 return
       
  1183             assert len(succs[corner]) == 2
       
  1184             assert self.coords2index(1, 2) in succs[corner]
       
  1185             assert self.coords2index(2, 1) in succs[corner]
       
  1186             # Only two choices.  Whichever we pick, the other must be the
       
  1187             # square picked on move m*n, as it's the only way to get back
       
  1188             # to (0, 0).  Save its index in self.final so that moves before
       
  1189             # the last know it must be kept free.
       
  1190             for i, j in (1, 2), (2, 1):
       
  1191                 this  = self.coords2index(i, j)
       
  1192                 final = self.coords2index(3-i, 3-j)
       
  1193                 self.final = final
       
  1194 
       
  1195                 remove_from_successors(this)
       
  1196                 succs[final].append(corner)
       
  1197                 self.lastij = this
       
  1198                 yield this
       
  1199                 succs[final].remove(corner)
       
  1200                 add_to_successors(this)
       
  1201 
       
  1202         # Generate moves 3 thru m*n-1.
       
  1203         def advance(len=len):
       
  1204             # If some successor has only one exit, must take it.
       
  1205             # Else favor successors with fewer exits.
       
  1206             candidates = []
       
  1207             for i in succs[self.lastij]:
       
  1208                 e = len(succs[i])
       
  1209                 assert e > 0, "else remove_from_successors() pruning flawed"
       
  1210                 if e == 1:
       
  1211                     candidates = [(e, i)]
       
  1212                     break
       
  1213                 candidates.append((e, i))
       
  1214             else:
       
  1215                 candidates.sort()
       
  1216 
       
  1217             for e, i in candidates:
       
  1218                 if i != self.final:
       
  1219                     if remove_from_successors(i):
       
  1220                         self.lastij = i
       
  1221                         yield i
       
  1222                     add_to_successors(i)
       
  1223 
       
  1224         # Generate moves 3 thru m*n-1.  Alternative version using a
       
  1225         # stronger (but more expensive) heuristic to order successors.
       
  1226         # Since the # of backtracking levels is m*n, a poor move early on
       
  1227         # can take eons to undo.  Smallest square board for which this
       
  1228         # matters a lot is 52x52.
       
  1229         def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
       
  1230             # If some successor has only one exit, must take it.
       
  1231             # Else favor successors with fewer exits.
       
  1232             # Break ties via max distance from board centerpoint (favor
       
  1233             # corners and edges whenever possible).
       
  1234             candidates = []
       
  1235             for i in succs[self.lastij]:
       
  1236                 e = len(succs[i])
       
  1237                 assert e > 0, "else remove_from_successors() pruning flawed"
       
  1238                 if e == 1:
       
  1239                     candidates = [(e, 0, i)]
       
  1240                     break
       
  1241                 i1, j1 = self.index2coords(i)
       
  1242                 d = (i1 - vmid)**2 + (j1 - hmid)**2
       
  1243                 candidates.append((e, -d, i))
       
  1244             else:
       
  1245                 candidates.sort()
       
  1246 
       
  1247             for e, d, i in candidates:
       
  1248                 if i != self.final:
       
  1249                     if remove_from_successors(i):
       
  1250                         self.lastij = i
       
  1251                         yield i
       
  1252                     add_to_successors(i)
       
  1253 
       
  1254         # Generate the last move.
       
  1255         def last():
       
  1256             assert self.final in succs[self.lastij]
       
  1257             yield self.final
       
  1258 
       
  1259         if m*n < 4:
       
  1260             self.squaregenerators = [first]
       
  1261         else:
       
  1262             self.squaregenerators = [first, second] + \
       
  1263                 [hard and advance_hard or advance] * (m*n - 3) + \
       
  1264                 [last]
       
  1265 
       
  1266     def coords2index(self, i, j):
       
  1267         assert 0 <= i < self.m
       
  1268         assert 0 <= j < self.n
       
  1269         return i * self.n + j
       
  1270 
       
  1271     def index2coords(self, index):
       
  1272         assert 0 <= index < self.m * self.n
       
  1273         return divmod(index, self.n)
       
  1274 
       
  1275     def _init_board(self):
       
  1276         succs = self.succs
       
  1277         del succs[:]
       
  1278         m, n = self.m, self.n
       
  1279         c2i = self.coords2index
       
  1280 
       
  1281         offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
       
  1282                    (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
       
  1283         rangen = range(n)
       
  1284         for i in range(m):
       
  1285             for j in rangen:
       
  1286                 s = [c2i(i+io, j+jo) for io, jo in offsets
       
  1287                                      if 0 <= i+io < m and
       
  1288                                         0 <= j+jo < n]
       
  1289                 succs.append(s)
       
  1290 
       
  1291     # Generate solutions.
       
  1292     def solve(self):
       
  1293         self._init_board()
       
  1294         for x in conjoin(self.squaregenerators):
       
  1295             yield x
       
  1296 
       
    def printsolution(self, x):
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print sep
        for i in range(m):
            row = squares[i]
            print "|" + "|".join(row) + "|"
            print sep

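# A minimal usage sketch (the conjoin_tests doctests below drive the same
# API on a 10x10 board):
#
#     k = Knights(6, 6)
#     for tour in k.solve():
#         k.printsolution(tour)
#         break        # one tour is plenty -- don't run this to exhaustion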
       
conjoin_tests = """

Generate the 3-bit binary numbers in order.  This illustrates dumbest-
possible use of conjoin, just to generate the full cross-product.

>>> for c in conjoin([lambda: iter((0, 1))] * 3):
...     print c
[0, 0, 0]
[0, 0, 1]
[0, 1, 0]
[0, 1, 1]
[1, 0, 0]
[1, 0, 1]
[1, 1, 0]
[1, 1, 1]

For efficiency in typical backtracking apps, conjoin() yields the same list
object each time.  So if you want to save away a full account of its
generated sequence, you need to copy its results.

>>> def gencopy(iterator):
...     for x in iterator:
...         yield x[:]

>>> for n in range(10):
...     all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
...     print n, len(all), all[0] == [0] * n, all[-1] == [1] * n
0 1 True True
1 2 True True
2 4 True True
3 8 True True
4 16 True True
5 32 True True
6 64 True True
7 128 True True
8 256 True True
9 512 True True

And run an 8-queens solver.

>>> q = Queens(8)
>>> LIMIT = 2
>>> count = 0
>>> for row2col in q.solve():
...     count += 1
...     if count <= LIMIT:
...         print "Solution", count
...         q.printsolution(row2col)
Solution 1
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
Solution 2
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+

>>> print count, "solutions in all."
92 solutions in all.

And run a Knight's Tour on a 10x10 board.  Note that there are about
20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.

>>> k = Knights(10, 10)
>>> LIMIT = 2
>>> count = 0
>>> for x in k.solve():
...     count += 1
...     if count <= LIMIT:
...         print "Solution", count
...         k.printsolution(x)
...     else:
...         break
Solution 1
+---+---+---+---+---+---+---+---+---+---+
|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
Solution 2
+---+---+---+---+---+---+---+---+---+---+
|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
"""

weakref_tests = """\
Generators are weakly referenceable:

>>> import weakref
>>> def gen():
...     yield 'foo!'
...
>>> wr = weakref.ref(gen)
>>> wr() is gen
True
>>> p = weakref.proxy(gen)

Generator-iterators are weakly referenceable as well:

>>> gi = gen()
>>> wr = weakref.ref(gi)
>>> wr() is gi
True
>>> p = weakref.proxy(gi)
>>> list(p)
['foo!']

"""

coroutine_tests = """\
Sending a value into a started generator:

>>> def f():
...     print (yield 1)
...     yield 2
>>> g = f()
>>> g.next()
1
>>> g.send(42)
42
2

Sending a value into a new generator produces a TypeError:

>>> f().send("foo")
Traceback (most recent call last):
...
TypeError: can't send non-None value to a just-started generator


Yield by itself yields None:

>>> def f(): yield
>>> list(f())
[None]



An obscene abuse of a yield expression within a generator expression:

>>> list((yield 21) for i in range(4))
[21, None, 21, None, 21, None, 21, None]

And a more sane, but still weird usage:

>>> def f(): list(i for i in [(yield 26)])
>>> type(f())
<type 'generator'>


A yield expression with augmented assignment.

>>> def coroutine(seq):
...     count = 0
...     while count < 200:
...         count += yield
...         seq.append(count)
>>> seq = []
>>> c = coroutine(seq)
>>> c.next()
>>> print seq
[]
>>> c.send(10)
>>> print seq
[10]
>>> c.send(10)
>>> print seq
[10, 20]
>>> c.send(10)
>>> print seq
[10, 20, 30]


Check some syntax errors for yield expressions:

>>> f=lambda: (yield 1),(yield 2)
Traceback (most recent call last):
  ...
SyntaxError: 'yield' outside function (<doctest test.test_generators.__test__.coroutine[21]>, line 1)

>>> def f(): return lambda x=(yield): 1
Traceback (most recent call last):
  ...
SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.coroutine[22]>, line 1)

>>> def f(): x = yield = y
Traceback (most recent call last):
  ...
SyntaxError: assignment to yield expression not possible (<doctest test.test_generators.__test__.coroutine[23]>, line 1)

>>> def f(): (yield bar) = y
Traceback (most recent call last):
  ...
SyntaxError: can't assign to yield expression (<doctest test.test_generators.__test__.coroutine[24]>, line 1)

>>> def f(): (yield bar) += y
Traceback (most recent call last):
  ...
SyntaxError: augmented assignment to yield expression not possible (<doctest test.test_generators.__test__.coroutine[25]>, line 1)


Now check some throw() conditions:

>>> def f():
...     while True:
...         try:
...             print (yield)
...         except ValueError,v:
...             print "caught ValueError (%s)" % (v),
>>> import sys
>>> g = f()
>>> g.next()

>>> g.throw(ValueError) # type only
caught ValueError ()

>>> g.throw(ValueError("xyz"))  # value only
caught ValueError (xyz)

>>> g.throw(ValueError, ValueError(1))   # value+matching type
caught ValueError (1)

>>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
caught ValueError (1)

>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
caught ValueError (1)

>>> g.throw(ValueError(1), "foo")       # bad args
Traceback (most recent call last):
  ...
TypeError: instance exception may not have a separate value

>>> g.throw(ValueError, "foo", 23)      # bad args
Traceback (most recent call last):
  ...
TypeError: throw() third argument must be a traceback object

>>> def throw(g,exc):
...     try:
...         raise exc
...     except:
...         g.throw(*sys.exc_info())
>>> throw(g,ValueError) # do it with traceback included
caught ValueError ()

>>> g.send(1)
1

>>> throw(g,TypeError)  # terminate the generator
Traceback (most recent call last):
  ...
TypeError

>>> print g.gi_frame
None

>>> g.send(2)
Traceback (most recent call last):
  ...
StopIteration

>>> g.throw(ValueError,6)       # throw on closed generator
Traceback (most recent call last):
  ...
ValueError: 6

>>> f().throw(ValueError,7)     # throw on just-opened generator
Traceback (most recent call last):
  ...
ValueError: 7

>>> f().throw("abc")     # throw on just-opened generator
Traceback (most recent call last):
  ...
TypeError: exceptions must be classes, or instances, not str

Now let's try closing a generator:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         print "exiting"

>>> g = f()
>>> g.next()
>>> g.close()
exiting
>>> g.close()  # should be no-op now

>>> f().close()  # close on just-opened generator should be fine

>>> def f(): yield      # an even simpler generator
>>> f().close()         # close before opening
>>> g = f()
>>> g.next()
>>> g.close()           # close normally

And finalization:

>>> def f():
...     try: yield
...     finally:
...         print "exiting"

>>> g = f()
>>> g.next()
>>> del g
exiting


GeneratorExit is not caught by except Exception:

>>> def f():
...     try: yield
...     except Exception: print 'except'
...     finally: print 'finally'

>>> g = f()
>>> g.next()
>>> del g
finally


Now let's try some ill-behaved generators:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         yield "foo!"
>>> g = f()
>>> g.next()
>>> g.close()
Traceback (most recent call last):
  ...
RuntimeError: generator ignored GeneratorExit
>>> g.close()


Our ill-behaved code should be invoked during GC:

>>> import sys, StringIO
>>> old, sys.stderr = sys.stderr, StringIO.StringIO()
>>> g = f()
>>> g.next()
>>> del g
>>> sys.stderr.getvalue().startswith(
...     "Exception RuntimeError: 'generator ignored GeneratorExit' in "
... )
True
>>> sys.stderr = old


And errors thrown during closing should propagate:

>>> def f():
...     try: yield
...     except GeneratorExit:
...         raise TypeError("fie!")
>>> g = f()
>>> g.next()
>>> g.close()
Traceback (most recent call last):
  ...
TypeError: fie!


Ensure that various yield expression constructs make their
enclosing function a generator:

>>> def f(): x += yield
>>> type(f())
<type 'generator'>

>>> def f(): x = yield
>>> type(f())
<type 'generator'>

>>> def f(): lambda x=(yield): 1
>>> type(f())
<type 'generator'>

>>> def f(): x=(i for i in (yield) if (yield))
>>> type(f())
<type 'generator'>

>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
>>> data = [1,2]
>>> g = f(data)
>>> type(g)
<type 'generator'>
>>> g.send(None)
'a'
>>> data
[1, 2]
>>> g.send(0)
'b'
>>> data
[27, 2]
>>> try: g.send(1)
... except StopIteration: pass
>>> data
[27, 27]

"""

refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references.  We add it to the standard suite so that the routine refleak
tests will trigger if it ever becomes uncollectable again.

>>> import itertools
>>> def leak():
...     class gen:
...         def __iter__(self):
...             return self
...         def next(self):
...             return self.item
...     g = gen()
...     head, tail = itertools.tee(g)
...     g.item = head
...     return head
>>> it = leak()

Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.

>>> item = it.next()



This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.

>>> def leak():
...    def gen():
...        while True:
...            yield g
...    g = gen()

>>> leak()



This test isn't really generator-related, but rather exception-in-cleanup
related.  The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method.  We can also test for this
explicitly, without generators.  We do have to redirect stderr to avoid
printing warnings and to double-check that we actually tested what we
wanted to test.

>>> import sys, StringIO
>>> old = sys.stderr
>>> try:
...     sys.stderr = StringIO.StringIO()
...     class Leaker:
...         def __del__(self):
...             raise RuntimeError
...
...     l = Leaker()
...     del l
...     err = sys.stderr.getvalue().strip()
...     err.startswith(
...         "Exception RuntimeError: RuntimeError() in <"
...     )
...     err.endswith("> ignored")
...     len(err.splitlines())
... finally:
...     sys.stderr = old
True
True
1



These refleak tests should perhaps be in a test file of their own;
test_generators just happened to be the test that drew them out.

"""

__test__ = {"tut":      tutorial_tests,
            "pep":      pep_tests,
            "email":    email_tests,
            "fun":      fun_tests,
            "syntax":   syntax_tests,
            "conjoin":  conjoin_tests,
            "weakref":  weakref_tests,
            "coroutine":  coroutine_tests,
            "refleaks": refleaks_tests,
            }

# Magic test name that regrtest.py invokes *after* importing this module.
# This worms around a bootstrap problem.
# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
# so this works as expected in both ways of running regrtest.
def test_main(verbose=None):
    from test import test_support, test_generators
    test_support.run_doctest(test_generators, verbose)

# This part isn't needed for regrtest, but for running the test directly.
if __name__ == "__main__":
    test_main(1)
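
# Typical invocations (assuming a standard CPython 2.6 source checkout):
#     python Lib/test/test_generators.py -v     # run directly, verbose doctest
#     python -m test.regrtest test_generators   # run under regrtest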