Parser improvements: raise structured parsing errors with source ranges, track line numbers across multi-line tokens, and add precedence rules to resolve shift/reduce conflicts
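
The new exceptions replace the previous print-and-continue behaviour in both p_error hooks, so callers can report the offending source range instead of silently swallowing errors. Below is a rough sketch of how a caller might consume them; the error_range layout (lineno, lexpos, length) follows the ParsingError constructor in this diff, while example.xproto and the direct parser.parse(..., lexer=...) call are illustrative assumptions, not a documented entry point.

    # Sketch: surfacing the new structured parse errors (illustrative only).
    from plyxproto.parser import ProtobufAnalyzer, ParsingError

    source = open("example.xproto").read()  # hypothetical input file

    analyzer = ProtobufAnalyzer()
    try:
        tree = analyzer.parser.parse(source, lexer=analyzer.lexer)
    except ParsingError as e:
        # error_range is (lineno, lexpos, length), as constructed in
        # p_error and p_policy_definition below.
        lineno, lexpos, length = e.error_range
        print("parse error at line %d, offset %d (length %d): %s"
              % (lineno, lexpos, length, e))

On the shift/reduce side: in PLY, earlier rows of the precedence table bind less tightly, so IMPLIES is the weakest FOL operator and EQUALS/IN the tightest, which resolves the ambiguous cases without rewriting the grammar rules.
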
diff --git a/plyxproto/logicparser.py b/plyxproto/logicparser.py
index c778448..c246baf 100644
--- a/plyxproto/logicparser.py
+++ b/plyxproto/logicparser.py
@@ -8,6 +8,11 @@
 
 from helpers import LexHelper, LU
 
+class FOLParsingError(Exception):
+    def __init__(self, message, error_range):
+        super(FOLParsingError, self).__init__(message)
+        self.error_range = error_range
+
 class FOLLexer(object):
     keywords = ('forall', 'exists', 'True', 'False', 'not', 'in')
 
@@ -41,7 +46,10 @@
         r'(\r\n)+'
         t.lexer.lineno += len(t.value) / 2
 
-    t_ESCAPE = r'{{ (.|\n)*? }}'
+    def t_ESCAPE(self, t):
+        r'{{ (.|\n)*? }}'
+        t.lexer.lineno += t.value.count('\n')
+        return t
 
     def t_BLOCK_COMMENT(self, t):
         r'/\*(.|\n)*?\*/'
@@ -124,11 +132,20 @@
                       | EXISTS SYMBOL COLON fole'''
         p[0] = {p[1]: [p[2], p[4]]}
 
-    
     def p_goal(self, p):
         '''goal : LT fole RT'''
         p[0] = p[2]
 
     def p_error(self, p):
-        print('error: {}'.format(p))
+        error = 'error: {}'.format(p)
+        raise FOLParsingError(error, (p.lineno, p.lexpos, len(p.value)))
 
+    precedence = (
+                  ("right", "IMPLIES"),
+                  ("left", "OR"),
+                  ("left", "AND"),
+                  ("right", "COLON"),
+                  ("right", "NOT"),
+                  ("right", "STAR"),
+                  ("right", "ESCAPE"),
+                  ("nonassoc", "EQUALS", "IN"))
diff --git a/plyxproto/parser.py b/plyxproto/parser.py
index f73bd53..6213a07 100755
--- a/plyxproto/parser.py
+++ b/plyxproto/parser.py
@@ -10,12 +10,18 @@
 
 import pdb
 from helpers import LexHelper, LU
-from logicparser import FOLParser, FOLLexer
+from logicparser import FOLParser, FOLLexer, FOLParsingError
 import ast
 
 class PythonError(Exception):
     pass
 
+class ParsingError(Exception):
+    def __init__(self, message, error_range):
+        super(ParsingError, self).__init__(message)
+        self.error_range = error_range
+
+
 class ProtobufLexer(object):
     keywords = ('double', 'float', 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
                 'fixed32', 'fixed64', 'sfixed32', 'sfixed64', 'bool', 'string', 'bytes',
@@ -36,7 +42,10 @@
     ] + [k.upper() for k in keywords]
 
 
-    t_POLICYBODY = r'< (.|\n)*? [^-]>'
+    def t_POLICYBODY(self, t):
+        r'< (.|\n)*? [^-]>'
+        t.lexer.lineno += t.value.count('\n')
+        return t
 
     literals = '()+-*/=?:,.^|&~!=[]{};<>@%'
 
@@ -99,7 +108,7 @@
     offset = 0
     lh = LexHelper()
     fol_lexer = lex.lex(module=FOLLexer())#, optimize=1)
-    fol_parser = yacc.yacc(module=FOLParser(), start='goal')
+    fol_parser = yacc.yacc(module=FOLParser(), start='goal', outputdir='/tmp', debug=0)
 
     def setOffset(self, of):
         self.offset = of
@@ -334,7 +343,11 @@
 
     def p_policy_definition(self, p):
         '''policy_definition : POLICY NAME POLICYBODY'''
-        fol = self.fol_parser.parse(p[3], lexer = self.fol_lexer)
+        try:
+            fol = self.fol_parser.parse(p[3], lexer = self.fol_lexer)
+        except FOLParsingError as e:
+            lineno, lexpos, length = e.error_range
+            raise ParsingError("Policy parsing error in policy %s" % p[2], (p.lineno(3) + lineno, lexpos + p.lexpos(3), length))
         p[0] = PolicyDefinition(Name(LU.i(p, 2)), fol)
         self.lh.set_parse_object(p[0], p)
 
@@ -492,13 +505,13 @@
         p[0] = p[2]
 
     def p_error(self, p):
-        print('error: {}'.format(p))
+        raise ParsingError("Parsing Error", (p.lineno, p.lexpos, len(p.value)))
 
 class ProtobufAnalyzer(object):
 
     def __init__(self):
-        self.lexer = lex.lex(module=ProtobufLexer())#, optimize=1)
-        self.parser = yacc.yacc(module=ProtobufParser(), start='goal', debug=0, outputdir='/tmp')#optimize=1)
+        self.lexer = lex.lex(module=ProtobufLexer())
+        self.parser = yacc.yacc(module=ProtobufParser(), start='goal', debug=0, outputdir='/tmp')
 
     def tokenize_string(self, code):
         self.lexer.input(code)
diff --git a/setup.py b/setup.py
index f600b8d..fd3cf5e 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@
 from setuptools import setup
 
 setup(name='plyxproto',
-      version='2.2.0',
+      version='3.0.0',
       description='xproto parser and processor',
       author='Dusan Klinec (original protobuf parser), Sapan Bhatia (xproto extensions)',
       author_email='sapan@opennetworking.org',