Add map/reduce operators in addition to policies
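
The new operators reuse the POLICYBODY token, so a definition has the
same shape as a policy: map NAME < body > / reduce NAME < body >, where
the body must be a single Python lambda (checked with the stdlib ast
module at parse time). For illustration only, and assuming the new rules
are wired into the top-level grammar (not shown in these hunks); the
model names below are made up:

    map slice_name < lambda s: s.name >
    reduce slice_count < lambda acc, s: acc + 1 >
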
diff --git a/demo.py b/demo.py
index d4c03af..9addd7e 100644
--- a/demo.py
+++ b/demo.py
@@ -47,6 +47,13 @@
extensions 500 to max;
}"""
-p5 = '''policy foo <exists foo: foo.x=foo.y>'''
+test4 = '''policy foo <exists foo: foo.x=foo.y>'''
parser = plyproto.ProtobufAnalyzer()
-print(parser.parse_string(p5))
+
+names = list(globals())  # snapshot the names; iterating globals() directly would fail once the loop binds t
+
+for t in names:
+    if t.startswith('test'):
+        print 'parsing %s' % t
+        parser.parse_string(globals()[t])
+
diff --git a/plyxproto/model.py b/plyxproto/model.py
index e2b0bf8..832c17a 100644
--- a/plyxproto/model.py
+++ b/plyxproto/model.py
@@ -121,6 +121,36 @@
self.v(self.name, visitor)
self.v(self.fieldId, visitor)
+class ReduceDefinition(SourceElement):
+ def __init__(self, name, body, linespan=None, lexspan=None, p=None):
+ super(ReduceDefinition, self).__init__(linespan=linespan, lexspan=lexspan, p=p)
+ self._fields += ['name', 'body']
+ self.name = name
+ Base.p(self.name, self)
+ self.body = body
+ Base.p(self.body, self)
+
+ def accept(self, visitor):
+ if visitor.visit_ReduceDefinition(self):
+ self.v(self.name, visitor)
+ self.v(self.body, visitor)
+
+
+class MapDefinition(SourceElement):
+ def __init__(self, name, body, linespan=None, lexspan=None, p=None):
+ super(MapDefinition, self).__init__(linespan=linespan, lexspan=lexspan, p=p)
+ self._fields += ['name', 'body']
+ self.name = name
+ Base.p(self.name, self)
+ self.body = body
+ Base.p(self.body, self)
+
+ def accept(self, visitor):
+ if visitor.visit_MapDefinition(self):
+ self.v(self.name, visitor)
+ self.v(self.body, visitor)
+
+
class PolicyDefinition(SourceElement):
def __init__(self, name, body, linespan=None, lexspan=None, p=None):
super(PolicyDefinition, self).__init__(linespan=linespan, lexspan=lexspan, p=p)
diff --git a/plyxproto/parser.py b/plyxproto/parser.py
index 6736e8a..bb92c8a 100755
--- a/plyxproto/parser.py
+++ b/plyxproto/parser.py
@@ -11,12 +11,16 @@
import pdb
from helpers import LexHelper, LU
from logicparser import FOLParser, FOLLexer
+import ast
+
+class PythonError(Exception):
+ pass
class ProtobufLexer(object):
keywords = ('double', 'float', 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
'fixed32', 'fixed64', 'sfixed32', 'sfixed64', 'bool', 'string', 'bytes',
'message', 'required', 'optional', 'repeated', 'enum', 'extensions', 'max', 'extend',
- 'to', 'package', '_service', 'rpc', 'returns', 'true', 'false', 'option', 'import', 'manytoone', 'manytomany', 'onetoone', 'policy')
+ 'to', 'package', '_service', 'rpc', 'returns', 'true', 'false', 'option', 'import', 'manytoone', 'manytomany', 'onetoone', 'policy', 'map', 'reduce')
tokens = [
'POLICYBODY',
@@ -175,7 +179,6 @@
'''policy_opt : DOUBLECOLON NAME'''
p[0] = p[2]
-
def p_policy_opt_empty(self, p):
'''policy_opt : empty'''
p[0] = None
@@ -305,6 +308,36 @@
'''enum_body_opt : enum_body'''
p[0] = p[1]
+    def p_reduce_definition(self, p):
+        '''reduce_definition : REDUCE NAME POLICYBODY'''
+        ltxt = p[3].lstrip('<').rstrip('>')  # strip the <...> delimiters
+        try:
+            body = ast.parse(ltxt).body
+        except SyntaxError as e:
+            raise PythonError("reduce operator is not valid Python: %s" % e)
+        if not body or not isinstance(body[0], ast.Expr):
+            raise PythonError("reduce operator needs to be an expression")
+        elif not isinstance(body[0].value, ast.Lambda):
+            raise PythonError("reduce operator needs to be a lambda")
+
+        p[0] = ReduceDefinition(Name(LU.i(p, 2)), ltxt)
+        self.lh.set_parse_object(p[0], p)
+
+    def p_map_definition(self, p):
+        '''map_definition : MAP NAME POLICYBODY'''
+        ltxt = p[3].lstrip('<').rstrip('>')  # strip the <...> delimiters
+        try:
+            body = ast.parse(ltxt).body
+        except SyntaxError as e:
+            raise PythonError("map operator is not valid Python: %s" % e)
+        if not body or not isinstance(body[0], ast.Expr):
+            raise PythonError("map operator needs to be an expression")
+        elif not isinstance(body[0].value, ast.Lambda):
+            raise PythonError("map operator needs to be a lambda")
+
+        p[0] = MapDefinition(Name(LU.i(p, 2)), ltxt)
+        self.lh.set_parse_object(p[0], p)
+
def p_policy_definition(self, p):
'''policy_definition : POLICY NAME POLICYBODY'''
fol = self.fol_parser.parse(p[3], lexer = self.fol_lexer)