#!/usr/bin/env python2
"""
generator_exception.py

Feed a parser from a generator vs. a hand-written class-based iterator,
both using Python 2's .next() / StopIteration protocol.
"""
from __future__ import print_function

import sys


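# Tokenize() is a generator function: calling it returns a generator object.
# Under Python 2, that object has a .next() method which raises StopIteration
# once the values are exhausted.  The 's' argument is ignored in this demo.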
def Tokenize(s):
  for item in ('1', '2', '3'):
    yield item


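# The same token stream as a plain class: it implements Python 2's iterator
# protocol by hand, returning successive values from next() and raising
# StopIteration to signal exhaustion.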
class TokenizeClass(object):  # NOT a generator
  def __init__(self, s):
    self.s = s
    self.i = 1

  def next(self):
    if self.i == 4:
      raise StopIteration()
    ret = str(self.i)
    self.i += 1
    return ret


class Parser(object):
  """Stub of a recursive TDOP parser; only token advancement is shown."""

  def __init__(self, lexer):
    self.lexer = lexer  # iterator with a .next() method
    self.token = None  # current token

  def Next(self):
    """Move to the next token."""
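    # Convert StopIteration into a None sentinel, so callers can test
    # self.token for end-of-input instead of handling the exception.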
    try:
      t = self.lexer.next()
    except StopIteration:
      t = None
    self.token = t


def main(argv):
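  # Toggle between the generator and the class-based iterator.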
  if 1:
    lexer = Tokenize('1+2')  # does NOT work
  else:
    lexer = TokenizeClass('1+2')  # WORKS

  p = Parser(lexer)
  p.Next()
  print('Done')


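# RuntimeError is treated as the script's fatal-error convention; any other
# exception propagates and produces a normal traceback.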
if __name__ == '__main__':
  try:
    main(sys.argv)
  except RuntimeError as e:
    print('FATAL: %s' % e, file=sys.stderr)
    sys.exit(1)