]> git.phdru.name Git - phdru.name/cgi-bin/blog-ru/search-tags.git/blobdiff - parser/parser.py
Use grako instead of PLY to compile EBNF to Python
[phdru.name/cgi-bin/blog-ru/search-tags.git] / parser / parser.py
old mode 100644 (file)
new mode 100755 (executable)
index e214698..460a552
-# Parse query
-
-from ply import lex, yacc
-
-literals = '()'
-
-tokens = ('OP_WORD', 'NAME', 'AND_OP', 'OR_OP', 'NOT_OP', 'SP1')
-
-t_OP_WORD = '(AND|and|OR|or|NOT|not)'
-
-t_NAME = '([a-z][a-z0-9_]*)'
-
-t_AND_OP = '&'
-
-t_OR_OP = r'\|'
-
-t_NOT_OP = '!'
-
-t_SP1 = '[ \t]+'
-
-def t_error(t):
-    """Avoid warnings on stderr"""
-
-lexer = lex.lex()
-
-def p_expression_name(p):
-    """expression : NAME"""
-    p[0] = ('NAME', p[1])
-
-def p_expression_and_and(p):
-    """expression : expression SP0 AND_OP AND_OP SP0 expression"""
-    p[0] = ('AND', p[1], p[6])
-
-def p_expression_and(p):
-    """expression : expression SP0 AND_OP SP0 expression"""
-    p[0] = ('AND', p[1], p[5])
-
-def p_expression_op_word(p):
-    """expression : l_expression op_word r_expression"""
-    if p[2] in ('AND', 'and'):
-        p[0] = ('AND', p[1], p[3])
-    elif p[2] in ('OR', 'or'):
-        p[0] = ('OR', p[1], p[3])
-    else:
-        raise ValueError(p)
-
-def p_expression_or_or(p):
-    """expression : expression SP0 OR_OP OR_OP SP0 expression"""
-    p[0] = ('OR', p[1], p[6])
-
-def p_expression_or(p):
-    """expression : expression SP0 OR_OP SP0 expression"""
-    p[0] = ('OR', p[1], p[5])
-
-def p_expression_not(p):
-    """expression : NOT_OP SP0 expression"""
-    p[0] = ('NOT', p[3])
-
-def p_expression_not_word(p):
-    """expression : op_word r_expression"""
-    if p[1] in ('NOT', 'not'):
-        p[0] = ('NOT', p[2])
-    else:
-        raise ValueError(p)
-
-def p_expression_in_parens(p):
-    """expression : expression_parens"""
-    p[0] = p[1]
-
-def p_l_expression(p):
-    """l_expression : expression_parens
-                    | expression SP1
-    """
-    if len(p) == 2:
-        p[0] = p[1]
-    elif len(p) == 3:
-        p[0] = p[1]
-    else:
-        raise ValueError(p)
-
-def p_r_expression(p):
-    """r_expression : expression_parens
-                    | SP1 expression
-    """
-    if len(p) == 2:
-        p[0] = p[1]
-    elif len(p) == 3:
-        p[0] = p[2]
-    else:
-        raise ValueError(p)
-
-def p_expression_parens(p):
-    """expression_parens : '(' SP0 expression SP0 ')'"""
-    p[0] = ('PARENS', p[3])
-
-def p_op_word(p):
-    """op_word : OP_WORD"""
-    if p[1] in ('AND', 'and', 'OR', 'or', 'NOT', 'not'):
-        p[0] = p[1]
-    else:
-        raise SyntaxError
-
-def p_SP0(p):
-    """SP0 : SP1
-           | empty
-    """
-
-def p_empty(p):
-    """empty :"""
-
-def p_error(p):
-    """Avoid warnings on stderr"""
-    yacc.restart()
-
-precedence = (
-    ('left', 'OR_OP'),
-    ('left', 'AND_OP'),
-    ('right', 'NOT_OP'),
-)
-
-parser = yacc.yacc()
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# CAVEAT UTILITOR
+#
+# This file was automatically generated by Grako.
+#
+#    https://pypi.python.org/pypi/grako/
+#
+# Any changes you make to it will be overwritten the next time
+# the file is generated.
+
+
+from __future__ import print_function, division, absolute_import, unicode_literals
+
+from grako.parsing import graken, Parser
+from grako.util import re, RE_FLAGS, generic_main  # noqa
+
+
+__version__ = (2016, 7, 9, 18, 10, 4, 5)
+
+__all__ = [
+    'TagsParser',
+    'TagsSemantics',
+    'main'
+]
+
+KEYWORDS = set([])
+
+
class TagsParser(Parser):
    """Recursive-descent (PEG) parser for tag-search queries.

    Grammar (informal): an expression is a NAME, a parenthesised
    expression, a NOT-prefixed expression, or two expressions joined
    by an AND/OR operator.

    CAVEAT: this class is auto-generated by Grako from the grammar
    file; hand edits are overwritten on regeneration -- change the
    grammar source instead.
    """

    def __init__(self,
                 whitespace=None,
                 nameguard=None,
                 comments_re=None,
                 eol_comments_re=None,
                 ignorecase=None,
                 left_recursion=True,   # grammar is left-recursive (expr op expr)
                 keywords=KEYWORDS,
                 namechars='',
                 **kwargs):
        # All options are forwarded verbatim to grako's base Parser.
        super(TagsParser, self).__init__(
            whitespace=whitespace,
            nameguard=nameguard,
            comments_re=comments_re,
            eol_comments_re=eol_comments_re,
            ignorecase=ignorecase,
            left_recursion=left_recursion,
            keywords=keywords,
            namechars=namechars,
            **kwargs
        )

    @graken()
    def _start_(self):
        # start ::= expression $ -- the whole input must be one expression.
        self._expression_()
        self._check_eof()

    @graken()
    def _expression_(self):
        # expression ::= expression and_op expression
        #              | expression or_op expression
        #              | not_op expression
        #              | expression_parens
        #              | name
        # Ordered PEG choice: the first alternative that matches wins.
        with self._group():
            with self._choice():
                with self._option():
                    self._expression_()
                    self._and_op_()
                    self._expression_()
                with self._option():
                    self._expression_()
                    self._or_op_()
                    self._expression_()
                with self._option():
                    self._not_op_()
                    self._expression_()
                with self._option():
                    self._expression_parens_()
                with self._option():
                    self._name_()
                self._error('no available options')

    @graken()
    def _expression_parens_(self):
        # expression_parens ::= '(' expression ')'
        self._token('(')
        self._expression_()
        self._token(')')

    @graken()
    def _name_(self):
        # A tag name: lowercase letter followed by letters/digits/underscores.
        # NOTE(review): '+' requires names of length >= 2; the previous PLY
        # grammar used '*' (allowing single-letter names) -- confirm intended.
        self._pattern(r'[a-z][a-z0-9_]+')

    @graken()
    def _and_op_(self):
        # and_op ::= '&' | '&&' | 'AND' | 'and'
        # NOTE(review): in an ordered choice '&' is tried before '&&', so
        # '&&' can never match as a unit (the lone '&' wins and the second
        # '&' is left in the input) -- verify the grammar ordering.
        with self._choice():
            with self._option():
                self._token('&')
            with self._option():
                self._token('&&')
            with self._option():
                self._token('AND')
            with self._option():
                self._token('and')
            self._error('expecting one of: & && AND and')

    @graken()
    def _or_op_(self):
        # or_op ::= '|' | '||' | 'OR' | 'or'
        # NOTE(review): same ordering concern as _and_op_ -- '|' shadows '||'.
        with self._choice():
            with self._option():
                self._token('|')
            with self._option():
                self._token('||')
            with self._option():
                self._token('OR')
            with self._option():
                self._token('or')
            self._error('expecting one of: OR or | ||')

    @graken()
    def _not_op_(self):
        # not_op ::= '!' | 'NOT' | 'not'
        with self._choice():
            with self._option():
                self._token('!')
            with self._option():
                self._token('NOT')
            with self._option():
                self._token('not')
            self._error('expecting one of: ! NOT not')
+
+
class TagsSemantics(object):
    """Default semantic actions for :class:`TagsParser`.

    Every rule callback is an identity transform: the AST node produced
    by the parser is returned untouched.  Subclass this and override the
    relevant methods to build a custom result tree.
    """

    def start(self, ast):
        """Top-level rule: pass the finished AST through."""
        return ast

    def expression(self, ast):
        """Boolean expression node: returned as-is."""
        return ast

    def expression_parens(self, ast):
        """Parenthesised sub-expression: returned as-is."""
        return ast

    def name(self, ast):
        """Tag name token: returned as-is."""
        return ast

    def and_op(self, ast):
        """AND operator token: returned as-is."""
        return ast

    def or_op(self, ast):
        """OR operator token: returned as-is."""
        return ast

    def not_op(self, ast):
        """NOT operator token: returned as-is."""
        return ast
+
+
def main(
        filename,
        startrule,
        trace=False,
        whitespace=None,
        nameguard=None,
        comments_re=None,
        eol_comments_re=None,
        ignorecase=None,
        left_recursion=True,
        **kwargs):
    """Read *filename*, parse it with TagsParser from *startrule*,
    and return the resulting AST.

    Only a subset of the accepted options is forwarded to parse();
    comments_re, eol_comments_re and left_recursion are accepted for
    command-line compatibility but not passed through.
    """
    with open(filename) as stream:
        source = stream.read()
    parser = TagsParser(parseinfo=False)
    # Falsy whitespace (e.g. '') is normalised to None, i.e. grako's default.
    return parser.parse(
        source,
        startrule,
        filename=filename,
        trace=trace,
        whitespace=whitespace or None,
        nameguard=nameguard,
        ignorecase=ignorecase,
        **kwargs)
+
if __name__ == '__main__':
    import json
    # Delegate command-line handling to grako's generic_main, which parses
    # argv and invokes main() above; then dump the AST in two forms.
    ast = generic_main(main, TagsParser, name='Tags')
    print('AST:')
    print(ast)
    print()
    print('JSON:')
    print(json.dumps(ast, indent=2))
    print()