paul@437 | 1 | import os |
paul@437 | 2 | from pyparser import parser, pytoken, metaparser |
paul@437 | 3 | |
class PythonGrammar(parser.Grammar):
    """Grammar subclass bound to the CPython 2.7 token definitions.

    The class attributes below appear to be the hooks parser.Grammar
    consumes (defined outside this file -- confirm against pyparser):
    they map the generic grammar machinery onto the concrete token set
    in pytoken.
    """

    # Token id for NAME tokens -- names are checked against keyword_ids
    # to decide whether they are keywords.
    KEYWORD_TOKEN = pytoken.python_tokens["NAME"]
    # Mapping of token name -> token id.
    TOKENS = pytoken.python_tokens
    # Mapping of operator string -> token id.
    OPERATOR_MAP = pytoken.python_opmap
paul@437 | 9 | |
def _get_python_grammar():
    """Build the Python 2.7 grammar from the bundled grammar file.

    Reads the grammar description from ``data/Grammar2.7`` located next
    to this module, feeds it through metaparser.ParserGenerator, and
    returns the grammar built for the PythonGrammar class.

    Raises IOError if the grammar data file is missing.
    """
    here = os.path.dirname(__file__)
    # 'with' guarantees the file is closed even if read() raises,
    # replacing the manual try/finally of the original.
    with open(os.path.join(here, "data", "Grammar2.7")) as fp:
        gram_source = fp.read()
    pgen = metaparser.ParserGenerator(gram_source)
    return pgen.build_grammar(PythonGrammar)
paul@437 | 19 | |
paul@437 | 20 | |
# Build the default 2.7 grammar once, at import time.
python_grammar = _get_python_grammar()

# A shared copy whose keyword table no longer contains "print", so that
# "print" parses as an ordinary name (presumably for the print_function
# future -- confirm with callers).  The keyword_ids mapping is copied
# before mutation so the original grammar is left untouched.
python_grammar_no_print = python_grammar.shared_copy()
python_grammar_no_print.keyword_ids = dict(python_grammar_no_print.keyword_ids)
python_grammar_no_print.keyword_ids.pop("print")
paul@437 | 25 | |
class _Tokens(object):
    """Namespace holder: token names are attached as attributes below."""
    pass

# Expose every token id as an attribute (e.g. tokens.NAME).  The loop
# variables tok_name/idx deliberately stay bound at module level: they
# are cleaned up by the del statement at the bottom of the module.
for tok_name, idx in pytoken.python_tokens.iteritems():
    setattr(_Tokens, tok_name, idx)
tokens = _Tokens()
paul@437 | 32 | |
class _Symbols(object):
    """Namespace holder: grammar symbol names are attached as attributes."""
    pass
# Reverse mapping of python_grammar.symbol_ids: symbol id -> symbol name.
rev_lookup = {}
for sym_name, idx in python_grammar.symbol_ids.iteritems():
    setattr(_Symbols, sym_name, idx)
    rev_lookup[idx] = sym_name
syms = _Symbols()
syms._rev_lookup = rev_lookup # for debugging
syms.sym_name = rev_lookup # for symbol module compatibility
paul@437 | 42 | |
# Drop the temporary helper names from the module namespace now that the
# grammar objects are built.  NOTE(review): rev_lookup and _Symbols are
# left behind -- syms keeps references to both, so deleting them here
# would be safe locally, but confirm no other module imports them before
# adding them to this del.
del _get_python_grammar, _Tokens, tok_name, sym_name, idx