import os
from pyparser import parser, pytoken, metaparser

class PythonGrammar(parser.Grammar):
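    """
    The grammar used by the parser, configured with the token name-to-id
    mapping and the operator map provided by the pytoken module. (This
    docstring is descriptive only, added for documentation purposes.)
    """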

    KEYWORD_TOKEN = pytoken.python_tokens["NAME"]
    TOKENS = pytoken.python_tokens
    OPERATOR_MAP = pytoken.python_opmap

def _get_python_grammar():
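    """
    Build and return the grammar for the parser by reading the Grammar-Lichen
    definition and feeding it to the parser generator. (Descriptive docstring
    added; behaviour is unchanged.)
    """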
    here = os.path.dirname(__file__)
    fp = open(os.path.join(here, "data", "Grammar-Lichen"))
    try:
        gram_source = fp.read()
    finally:
        fp.close()
    pgen = metaparser.ParserGenerator(gram_source)
    return pgen.build_grammar(PythonGrammar)


python_grammar = _get_python_grammar()

# For token module compatibility, expose name-to-index and index-to-name
# mappings.

tokens = pytoken.python_tokens
tok_name = {}
for name, idx in pytoken.python_tokens.iteritems():
    tok_name[idx] = name

# For symbol module compatibility, expose name-to-index and index-to-name
# mappings.

syms = python_grammar.symbol_ids
sym_name = {}
for name, idx in python_grammar.symbol_ids.iteritems():
    sym_name[idx] = name

del _get_python_grammar, name, idx
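# A small usage sketch (comments only, not part of the module): the mappings
# above mirror the standard token and symbol modules, allowing lookups in
# both directions. The "file_input" rule name below is an assumption about
# the Grammar-Lichen definition.
#
#     tokens["NAME"]                  # numeric id of the NAME token
#     tok_name[tokens["NAME"]]        # "NAME"
#     syms["file_input"]              # numeric id of the file_input rule
#     sym_name[syms["file_input"]]    # "file_input"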