Lichen

pyparser/test/test_metaparser.py

716:1f9bb1de08f5
2017-03-12, Paul Boddie: Merged changes from the default branch. (branch: return-value-definition)
import py
import os
import glob
import tokenize
import token
import StringIO
from pyparser.metaparser import ParserGenerator, PgenError
from pyparser.pygram import PythonGrammar
from pyparser import parser


class MyGrammar(parser.Grammar):
    TOKENS = token.__dict__
    OPERATOR_MAP = {
        "+" : token.OP,
        "-" : token.OP,
        }
    KEYWORD_TOKEN = token.NAME


class TestParserGenerator:

    def gram_for(self, grammar_source):
        p = ParserGenerator(grammar_source + "\n")
        return p.build_grammar(MyGrammar)

    def test_multiple_rules(self):
        g = self.gram_for("foo: NAME bar\nbar: STRING")
        assert len(g.dfas) == 2
        assert g.start == g.symbol_ids["foo"]

    def test_simple(self):
        g = self.gram_for("eval: NAME\n")
        assert len(g.dfas) == 1
        eval_sym = g.symbol_ids["eval"]
        assert g.start == eval_sym
        states, first = g.dfas[eval_sym - 256]
        assert states == [([(1, 1)], False), ([], True)]
        assert g.labels[0] == 0

    def test_load_python_grammars(self):
        gram_pat = os.path.join(os.path.dirname(__file__), "..", "data",
                                "Grammar*")
        for gram_file in glob.glob(gram_pat):
            fp = open(gram_file, "r")
            try:
                ParserGenerator(fp.read()).build_grammar(PythonGrammar)
            finally:
                fp.close()

    def test_items(self):
        g = self.gram_for("foo: NAME STRING OP '+'")
        assert len(g.dfas) == 1
        states = g.dfas[g.symbol_ids["foo"] - 256][0]
        last = states[0][0][0][1]
        for state in states[1:-1]:
            assert last < state[0][0][1]
            last = state[0][0][1]

    def test_alternatives(self):
        g = self.gram_for("foo: STRING | OP")
        assert len(g.dfas) == 1

    def test_optional(self):
        g = self.gram_for("foo: [NAME]")

    def test_grouping(self):
        g = self.gram_for("foo: (NAME | STRING) OP")

    def test_keyword(self):
        g = self.gram_for("foo: 'some_keyword' 'for'")
        assert len(g.keyword_ids) == 2
        assert len(g.token_ids) == 0

    def test_token(self):
        g = self.gram_for("foo: NAME")
        assert len(g.token_ids) == 1

    def test_operator(self):
        g = self.gram_for("add: NUMBER '+' NUMBER")
        assert len(g.keyword_ids) == 0
        assert len(g.token_ids) == 2

        exc = py.test.raises(PgenError, self.gram_for, "add: '/'").value
        assert str(exc) == "no such operator: '/'"

    def test_symbol(self):
        g = self.gram_for("foo: some_other_rule\nsome_other_rule: NAME")
        assert len(g.dfas) == 2
        assert len(g.labels) == 3

        exc = py.test.raises(PgenError, self.gram_for, "foo: no_rule").value
        assert str(exc) == "no such rule: 'no_rule'"

    def test_repeaters(self):
        g1 = self.gram_for("foo: NAME+")
        g2 = self.gram_for("foo: NAME*")
        assert g1.dfas != g2.dfas

        g = self.gram_for("foo: (NAME | STRING)*")
        g = self.gram_for("foo: (NAME | STRING)+")

    def test_error(self):
        exc = py.test.raises(PgenError, self.gram_for, "hi").value
        assert str(exc) == "expected token OP but got NEWLINE"
        assert exc.location == ((1, 2), (1, 3), "hi\n")
        exc = py.test.raises(PgenError, self.gram_for, "hi+").value
        assert str(exc) == "expected ':' but got '+'"
        assert exc.location == ((1, 2), (1, 3), "hi+\n")

    def test_comments_and_whitespace(self):
        self.gram_for("\n\n# comment\nrule: NAME # comment")
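
Note: as a usage sketch (not part of the test file), the gram_for helper above boils down to the call sequence below. The rule string "expr: NAME '+' NAME" and the DemoGrammar name are illustrative only; they simply mirror what the tests do with MyGrammar.

from pyparser.metaparser import ParserGenerator
from pyparser import parser
import token

# Grammar subclass mirroring MyGrammar from the test file above.
class DemoGrammar(parser.Grammar):
    TOKENS = token.__dict__
    OPERATOR_MAP = {"+": token.OP, "-": token.OP}
    KEYWORD_TOKEN = token.NAME

# Build a grammar from a pgen-style rule string (a trailing newline is
# required; gram_for appends one).  Symbol ids start at 256, so the DFA
# for "expr" sits at g.dfas[g.symbol_ids["expr"] - 256].
g = ParserGenerator("expr: NAME '+' NAME\n").build_grammar(DemoGrammar)
assert g.start == g.symbol_ids["expr"]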