From 96b71a5123a563562796e3ba2ab54be6a3961ec9 Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Sun, 14 Sep 2014 17:33:37 +0200 Subject: [PATCH 01/14] backend: Semi-working improvement smart parsing This shows how pyparsing could help us parse more complex search queries. pyparsing translates it into an AST that is then parsed through some code to convert it to dictSQL. Proper grouping of operators isn't working, ie 'and' and 'or' have the same priority which leads to incorrect queries. There are probably other issues as well, but it's something :) --- nipap/nipap/smart_parsing.py | 235 +++++++++++++++++++++++++++++++++++ 1 file changed, 235 insertions(+) create mode 100644 nipap/nipap/smart_parsing.py diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py new file mode 100644 index 000000000..7350572bf --- /dev/null +++ b/nipap/nipap/smart_parsing.py @@ -0,0 +1,235 @@ +#!/usr/bin/python + +from itertools import izip_longest +import logging +from pyparsing import Forward, Group, nestedExpr, ParseResults, QuotedString, Word, ZeroOrMore, alphanums, nums, oneOf + +op_text = { + '=': 'equals', + '<': 'is less than' + } + +class SmartParser: + columns = {} + match_operators = ['=', '<', '>', '<=', '=>', '~'] + boolean_operators = ['and', 'or'] + + def __init__(self): + self._logger = logging.getLogger(self.__class__.__name__) + + + def _string_to_ast(self, input_string): + """ Parse a smart search string and return it in an AST like form + """ + + # simple words + word = Word(alphanums + "-.") + # numbers + number = Word(nums) + # operators for matching + match_op = oneOf(' '.join(self.match_operators)) + # quoted string + quoted_string = QuotedString('"', unquoteResults=True, escChar='\\') + # expression to match a certain value for an attribute + expression = Group(word + match_op + (quoted_string | word | number)) + # we work on atoms, which are single words, quoted strings or match expressions + atom = (quoted_string | expression | word ) + + 
enclosed = Forward() + parens = nestedExpr('(', ')', content=enclosed) + enclosed << ( + atom | parens + ) + + content = Forward() + content << ( + ZeroOrMore(enclosed) + ) + + return content.parseString(input_string) + + + def _ast_to_dictsql(self, ast): + """ + """ + #self._logger.debug("parsing AST: " + str(ast)) + interp = [] + + dse = None + + # dictSql stack + dss = { + 'operator': None, + 'val1': None, + 'val2': None + } + + for part, lookahead in izip_longest(ast, ast[1:]): + #self._logger.debug("part: %s %s" % (part, type(part))) + print "part: %s lookahead: %s" % (part, lookahead) + + # handle operators joining together expressions + if isinstance(part, basestring) and part.lower() in self.boolean_operators: + dss['operator'] = part.lower() + #self._logger.debug("operator part: %s" % part.lower()) + continue + + # string expr that we expand to dictsql expression + elif isinstance(part, basestring): + # dict sql expression + dse = self._string_to_dictsql(part) + #self._logger.debug('string part: %s => %s' % (part, dse)) + elif isinstance(part, ParseResults): + if part[1] in self.match_operators: + dse = self._parse_expr(part) + print dse + else: +# for word in part: +# if str(word).lower() in ('or', 'and'): +# self._logger.debug('AND/ORing: ' + str(word) + str(type(word))) + dse = self._ast_to_dictsql(part) + else: + raise ParserError("Unhandled part in AST: %s" % part) + + print "DSE:", dse + + if lookahead is not None: + if dss['val1'] is not None and dss['val2'] is not None: + print "nesting!" 
+ dss = { + 'operator': None, + 'val1': dss, + 'val2': None + } + + if dss['val1'] is None: + #self._logger.debug('val1 not set, using dse: %s' % str(dse)) + dss['val1'] = dse + else: + #self._logger.debug("val1 is set, operator is '%s', val2 = dst: %s" % (dss['operator'], str(dse))) + dss['val2'] = dse + + + # special handling when AST is only one expression, then we overwrite + # the dss with dse + if len(ast) == 1: + dss = dse + if len(ast) == 0: + dss = self._string_to_dictsql('') + + + # return the final composed stack of dictsql expressions + return dss + + + def _string_to_dictsql(self, string): + """ Do magic matching of single words or quoted string + """ + #self._logger.debug("parsing string: " + str(string)) + #self._logger.debug("Query part '" + string + "' interpreted as text") + + dictsql = { + 'operator': 'or', + 'val1': { + 'operator': 'or', + 'val1': { + 'operator': 'regex_match', + 'val1': 'description', + 'val2': string + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'comment', + 'val2': string + } + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'node', + 'val2': string + }, + 'interpretation': 'text', + 'attribute': 'vrf or name or description' + } + + return dictsql + + + def _parse_expr(self, part): + """ Parse matching expression in form key value + + For example; + vlan > 1 + node = FOO-BAR + """ + self._logger.debug("parsing expression: " + str(part)) + key, op, val = part + + dictsql = { + 'operator': op, + 'val1': key, + 'val2': val, + 'interpretation': 'expression' + } + + return dictsql + + + def _add_implicit_ops(self, input_ast): + """ Add implicit AND operator between expressions if there is no + explicit operator specified. 
+ """ + res_ast = [] + + for token, lookahead in izip_longest(input_ast, input_ast[1:]): + if isinstance(token, str) and token.lower() in self.boolean_operators: + res_ast.append(token) + continue + if isinstance(lookahead, str) and lookahead.lower() in self.boolean_operators: + res_ast.append(token) + continue + res_ast.append(token) + if lookahead is not None: + res_ast.append('and') + + return res_ast + + + + def parse(self, input_string): + raw_ast = self._string_to_ast(input_string) + ast = self._add_implicit_ops(raw_ast) + return self._ast_to_dictsql(ast) + + +class VrfSmartParser(SmartParser): + columns = { + 'rt', + 'name', + 'description', + 'tags' + } + + +class ParserError(Exception): + """ General parser error + """ + +if __name__ == '__main__': + # set logging format + LOG_FORMAT = "%(asctime)s: %(module)-10s %(levelname)-8s %(message)s" + # setup basic logging + logging.basicConfig(format=LOG_FORMAT) + logger = logging.getLogger() + logger.setLevel(logging.DEBUG) + + p = SmartParser() + #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar")') + #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar"))') + import sys + dictsql = p.parse(' '.join(sys.argv[1:])) + import pprint + print "----------" + pp = pprint.PrettyPrinter(indent = 4) + pp.pprint(dictsql) + print "----------" From 4eaa4d20ac6bb8ac9754845aa0040c929292787d Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Thu, 7 May 2015 17:59:14 +0200 Subject: [PATCH 02/14] backend: add interpretation for boolean operators --- nipap/nipap/smart_parsing.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py index 7350572bf..3ed307e7d 100644 --- a/nipap/nipap/smart_parsing.py +++ b/nipap/nipap/smart_parsing.py @@ -71,6 +71,10 @@ def _ast_to_dictsql(self, ast): # handle operators joining together expressions if isinstance(part, basestring) and part.lower() in 
self.boolean_operators: dss['operator'] = part.lower() + dss['interpretation'] = { + 'interpretation': part.lower(), + 'operator': part.lower() + } #self._logger.debug("operator part: %s" % part.lower()) continue From b767d2d56e7ef9dd70ee677b51795265f26cc95b Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Thu, 7 May 2015 18:00:12 +0200 Subject: [PATCH 03/14] backend: implement basic PoolSmartParser Since attributes are specific for prefix, vrfs and pools we have a PoolSmartParser that inherits from SmartParser but overrides the function that does smart parsing for string tokens. --- nipap/nipap/smart_parsing.py | 87 ++++++++++++++++++++++++------------ 1 file changed, 59 insertions(+), 28 deletions(-) diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py index 3ed307e7d..3854895ad 100644 --- a/nipap/nipap/smart_parsing.py +++ b/nipap/nipap/smart_parsing.py @@ -2,6 +2,8 @@ from itertools import izip_longest import logging +import re + from pyparsing import Forward, Group, nestedExpr, ParseResults, QuotedString, Word, ZeroOrMore, alphanums, nums, oneOf op_text = { @@ -129,34 +131,7 @@ def _ast_to_dictsql(self, ast): def _string_to_dictsql(self, string): """ Do magic matching of single words or quoted string """ - #self._logger.debug("parsing string: " + str(string)) - #self._logger.debug("Query part '" + string + "' interpreted as text") - - dictsql = { - 'operator': 'or', - 'val1': { - 'operator': 'or', - 'val1': { - 'operator': 'regex_match', - 'val1': 'description', - 'val2': string - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'comment', - 'val2': string - } - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'node', - 'val2': string - }, - 'interpretation': 'text', - 'attribute': 'vrf or name or description' - } - - return dictsql + raise NotImplemented() def _parse_expr(self, part): @@ -215,6 +190,62 @@ class VrfSmartParser(SmartParser): } +class PoolSmartParser(SmartParser): + + def _string_to_dictsql(self, string): + """ 
Do magic matching of single words or quoted string + """ + self._logger.debug("parsing string: " + str(string)) + #self._logger.debug("Query part '" + string + "' interpreted as text") + + if re.match('#', string): + self._logger.debug("Query part '" + string + "' interpreted as tag") + dictsql = { + 'interpretation': { + 'string': string, + 'interpretation': '(inherited) tag', + 'attribute': 'tag', + 'operator': 'equals_any', + }, + 'operator': 'or', + 'val1': { + 'operator': 'equals_any', + 'val1': 'tags', + 'val2': string[1:] + }, + 'val2': { + 'operator': 'equals_any', + 'val1': 'inherited_tags', + 'val2': string[1:] + } + } + + else: + self._logger.debug("Query part '" + string + "' interpreted as text") + dictsql = { + 'interpretation': { + 'attribute': 'name or description', + 'interpretation': 'text', + 'operator': 'regex', + 'string': string + }, + 'operator': 'or', + 'val1': { + 'operator': 'regex_match', + 'val1': 'name', + 'val2': string + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'description', + 'val2': string + } + } + + return dictsql + + + class ParserError(Exception): """ General parser error """ From 7362cb9bff839ba5bff494e17cdd0568183087a1 Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Thu, 7 May 2015 18:01:26 +0200 Subject: [PATCH 04/14] backend: add operator characters symbols Instead of just having a textual description of each operator we also add symbols for it, like '=' for 'equals'. 
--- nipap/nipap/backend.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/nipap/nipap/backend.py b/nipap/nipap/backend.py index 6081aa24d..8c14077ae 100644 --- a/nipap/nipap/backend.py +++ b/nipap/nipap/backend.py @@ -510,20 +510,30 @@ 'and': 'AND', 'or': 'OR', 'equals_any': '= ANY', + '=': '=', 'equals': '=', + '<': '<', 'less': '<', + '<=': '<=', 'less_or_equal': '<=', + '>': '>', 'greater': '>', + '>=': '>=', 'greater_or_equal': '>=', 'is': 'IS', 'is_not': 'IS NOT', + '!=': '!=', 'not_equals': '!=', 'like': 'LIKE', 'regex_match': '~*', 'regex_not_match': '!~*', + '>>': '>>', 'contains': '>>', + '>>=': '>>=', 'contains_equals': '>>=', + '<<': '<<', 'contained_within': '<<', + '<<=': '<<=', 'contained_within_equals': '<<=' } """ Maps operators in a prefix query to SQL operators. From 12deebd9eac69ed05f2b98fe55b09d08baeb0d3e Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Thu, 7 May 2015 18:02:37 +0200 Subject: [PATCH 05/14] backend: use PoolSmartParser for pool searches --- nipap/nipap/backend.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/nipap/nipap/backend.py b/nipap/nipap/backend.py index 8c14077ae..ed08276ee 100644 --- a/nipap/nipap/backend.py +++ b/nipap/nipap/backend.py @@ -2247,6 +2247,11 @@ def _parse_pool_query(self, query_str): This is a helper function to smart_search_pool for easier unit testing of the parser. """ + from smart_parsing import PoolSmartParser + sp = PoolSmartParser() + query = sp.parse(query_str) + return query + # find query parts query_str_parts = self._get_query_parts(query_str) From b60b3d9fe5c0acc2b5a0c5eb77f9c2089926f985 Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Tue, 14 Jul 2015 21:09:34 +0200 Subject: [PATCH 06/14] Complete basic smart parser Not sure I should call this completed or not but at least it works the way I want for all queries I've tried so far. There are certainly areas to improve still but it's getting there. The smart parser now runs the test suite without failure. 
Only change to the test suite is the text expected for a search string with an unclosed quote. For each object type (VRF, Pool, Prefix) we have a class that inherits from the main SmartParser class and applies logic specific for the object type so we end up with VrfSmartParser, PoolSmartParser and PrefixSmartParser. For example, only PrefixSmartParser matches on prefixes. Exceptions thrown by the backend are moved to a separate file so it can be imported both into backend.py and smart_parsing.py Fixes #531. --- nipap/nipap/backend.py | 82 +---- nipap/nipap/errors.py | 67 ++++ nipap/nipap/smart_parsing.py | 571 ++++++++++++++++++++++++++++++----- tests/nipaptest.py | 42 ++- 4 files changed, 588 insertions(+), 174 deletions(-) create mode 100644 nipap/nipap/errors.py diff --git a/nipap/nipap/backend.py b/nipap/nipap/backend.py index ed08276ee..67f383b28 100644 --- a/nipap/nipap/backend.py +++ b/nipap/nipap/backend.py @@ -198,7 +198,9 @@ import re import IPy +from errors import * import authlib +import smart_parsing # support multiple versions of parsedatetime try: @@ -1557,12 +1559,12 @@ def smart_search_vrf(self, auth, query_str, search_options=None, extra_query=Non try: query = self._parse_vrf_query(query_str) - except NipapValueError: + except NipapValueError as exc: return { 'interpretation': [ { 'string': query_str, - 'interpretation': 'unclosed quote', + 'interpretation': exc, 'attribute': 'text' } ], @@ -1592,6 +1594,10 @@ def _parse_vrf_query(self, query_str): This is a helper function to smart_search_vrf for easier unit testing of the parser. """ + sp = smart_parsing.VrfSmartParser() + query = sp.parse(query_str) + return query + # find query parts query_str_parts = self._get_query_parts(query_str) @@ -2247,8 +2253,7 @@ def _parse_pool_query(self, query_str): This is a helper function to smart_search_pool for easier unit testing of the parser. 
""" - from smart_parsing import PoolSmartParser - sp = PoolSmartParser() + sp = smart_parsing.PoolSmartParser() query = sp.parse(query_str) return query @@ -3587,6 +3592,9 @@ def _parse_prefix_query(self, query_str): This is a helper function to smart_search_prefix for easier unit testing of the parser. """ + sp = smart_parsing.PrefixSmartParser() + query = sp.parse(query_str) + return query # find query parts query_str_parts = self._get_query_parts(query_str) @@ -4417,71 +4425,5 @@ def search_tag(self, auth, query, search_options=None): -class NipapError(Exception): - """ NIPAP base error class. - """ - - error_code = 1000 - - -class NipapInputError(NipapError): - """ Erroneous input. - - A general input error. - """ - - error_code = 1100 - - -class NipapMissingInputError(NipapInputError): - """ Missing input. - - Most input is passed in dicts, this could mean a missing key in a dict. - """ - - error_code = 1110 - - -class NipapExtraneousInputError(NipapInputError): - """ Extraneous input. - - Most input is passed in dicts, this could mean an unknown key in a dict. - """ - - error_code = 1120 - - -class NipapNoSuchOperatorError(NipapInputError): - """ A non existent operator was specified. - """ - - error_code = 1130 - - -class NipapValueError(NipapError): - """ Something wrong with a value - - For example, trying to send an integer when an IP address is expected. - """ - - error_code = 1200 - - -class NipapNonExistentError(NipapError): - """ A non existent object was specified - - For example, try to get a prefix from a pool which doesn't exist. - """ - - error_code = 1300 - - -class NipapDuplicateError(NipapError): - """ The passed object violates unique constraints - - For example, create a VRF with a name of an already existing one. 
- """ - - error_code = 1400 # vim: et ts=4 : diff --git a/nipap/nipap/errors.py b/nipap/nipap/errors.py new file mode 100644 index 000000000..e6413629f --- /dev/null +++ b/nipap/nipap/errors.py @@ -0,0 +1,67 @@ + +class NipapError(Exception): + """ NIPAP base error class. + """ + + error_code = 1000 + + +class NipapInputError(NipapError): + """ Erroneous input. + + A general input error. + """ + + error_code = 1100 + + +class NipapMissingInputError(NipapInputError): + """ Missing input. + + Most input is passed in dicts, this could mean a missing key in a dict. + """ + + error_code = 1110 + + +class NipapExtraneousInputError(NipapInputError): + """ Extraneous input. + + Most input is passed in dicts, this could mean an unknown key in a dict. + """ + + error_code = 1120 + + +class NipapNoSuchOperatorError(NipapInputError): + """ A non existent operator was specified. + """ + + error_code = 1130 + + +class NipapValueError(NipapError): + """ Something wrong with a value + + For example, trying to send an integer when an IP address is expected. + """ + + error_code = 1200 + + +class NipapNonExistentError(NipapError): + """ A non existent object was specified + + For example, try to get a prefix from a pool which doesn't exist. + """ + + error_code = 1300 + + +class NipapDuplicateError(NipapError): + """ The passed object violates unique constraints + + For example, create a VRF with a name of an already existing one. 
+ """ + + error_code = 1400 diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py index 3854895ad..d64716413 100644 --- a/nipap/nipap/smart_parsing.py +++ b/nipap/nipap/smart_parsing.py @@ -4,51 +4,134 @@ import logging import re -from pyparsing import Forward, Group, nestedExpr, ParseResults, QuotedString, Word, ZeroOrMore, alphanums, nums, oneOf +import IPy + +from pyparsing import Combine, Forward, Group, Literal, nestedExpr, OneOrMore, ParseResults, quotedString, Regex, QuotedString, Word, ZeroOrMore, alphanums, nums, oneOf + +from errors import * -op_text = { - '=': 'equals', - '<': 'is less than' - } class SmartParser: - columns = {} - match_operators = ['=', '<', '>', '<=', '=>', '~'] + attributes = {} + match_operators = ['=', '<', '>', '<=', '>=', '~'] boolean_operators = ['and', 'or'] def __init__(self): self._logger = logging.getLogger(self.__class__.__name__) + def _is_ipv4(self, ip): + """ Return true if given arg is a valid IPv4 address + """ + try: + p = IPy.IP(ip) + except ValueError: + return False + + if p.version() == 4: + return True + return False + + + def _is_ipv6(self, ip): + """ Return true if given arg is a valid IPv6 address + """ + try: + p = IPy.IP(ip) + except ValueError: + return False + + if p.version() == 6: + return True + return False + + + def _get_afi(self, ip): + """ Return address-family (4 or 6) for IP or None if invalid address + """ + + parts = unicode(ip).split("/") + if len(parts) == 1: + # just an address + if self._is_ipv4(ip): + return 4 + elif self._is_ipv6(ip): + return 6 + else: + return None + elif len(parts) == 2: + # a prefix! + try: + pl = int(parts[1]) + except ValueError: + # if casting parts[1] to int failes, this is not a prefix.. 
+ return None + + if self._is_ipv4(parts[0]): + if pl >= 0 and pl <= 32: + # prefix mask must be between 0 and 32 + return 4 + # otherwise error + return None + elif self._is_ipv6(parts[0]): + if pl >= 0 and pl <= 128: + # prefix mask must be between 0 and 128 + return 6 + # otherwise error + return None + else: + return None + else: + # more than two parts.. this is neither an address or a prefix + return None + + def _string_to_ast(self, input_string): """ Parse a smart search string and return it in an AST like form """ # simple words - word = Word(alphanums + "-.") + word = Word(alphanums + "-./").setResultsName('word') # numbers - number = Word(nums) + number = Word(nums).setResultsName('number') + + # IPv4 address + ipv4_oct = Regex("((2(5[0-5]|[0-4][0-9])|[01]?[0-9][0-9]?))") + ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct*3)).setResultsName('ipv4_address') + + # VRF RTs of the form number:number + vrf_rt = Combine((ipv4_address | number) + Literal(':') + number).setResultsName('vrf_rt') + + # tags + tags = Combine( Literal('#') + word).setResultsName('tag') + # operators for matching - match_op = oneOf(' '.join(self.match_operators)) + match_op = oneOf(' '.join(self.match_operators)).setResultsName('operator') + boolean_op = oneOf(' '.join(self.boolean_operators)).setResultsName('boolean') # quoted string - quoted_string = QuotedString('"', unquoteResults=True, escChar='\\') + quoted_string = QuotedString('"', unquoteResults=True, escChar='\\').setResultsName('quoted_string') # expression to match a certain value for an attribute - expression = Group(word + match_op + (quoted_string | word | number)) - # we work on atoms, which are single words, quoted strings or match expressions - atom = (quoted_string | expression | word ) + expression = Group(word + match_op + (quoted_string | word | number)).setResultsName('expression') + # we work on atoms, which are single quoted strings, match expressions, + # tags, VRF RT or simple words. 
+ # NOTE: Place them in order of most exact match first! + atom = Group(quoted_string | expression | tags | vrf_rt | boolean_op | word) enclosed = Forward() parens = nestedExpr('(', ')', content=enclosed) - enclosed << ( - atom | parens - ) +# enclosed << ( +# atom | parens +# ) +# TODO: enable above, we skip parentheses for now + enclosed << ( atom ) content = Forward() content << ( ZeroOrMore(enclosed) ) - return content.parseString(input_string) + res = content.parseString(input_string) + return res def _ast_to_dictsql(self, ast): @@ -67,62 +150,53 @@ def _ast_to_dictsql(self, ast): } for part, lookahead in izip_longest(ast, ast[1:]): - #self._logger.debug("part: %s %s" % (part, type(part))) - print "part: %s lookahead: %s" % (part, lookahead) + self._logger.debug("part: %s %s" % (part, type(part))) # handle operators joining together expressions - if isinstance(part, basestring) and part.lower() in self.boolean_operators: - dss['operator'] = part.lower() + if part.getName() == 'boolean': + op = part[0].lower() + dss['operator'] = op dss['interpretation'] = { - 'interpretation': part.lower(), - 'operator': part.lower() + 'interpretation': op, + 'operator': op } - #self._logger.debug("operator part: %s" % part.lower()) continue # string expr that we expand to dictsql expression - elif isinstance(part, basestring): - # dict sql expression - dse = self._string_to_dictsql(part) - #self._logger.debug('string part: %s => %s' % (part, dse)) - elif isinstance(part, ParseResults): - if part[1] in self.match_operators: + elif part.getName() == 'expression': + if part.operator in self.match_operators: dse = self._parse_expr(part) - print dse else: -# for word in part: -# if str(word).lower() in ('or', 'and'): -# self._logger.debug('AND/ORing: ' + str(word) + str(type(word))) dse = self._ast_to_dictsql(part) + elif part.getName() in ('word', 'tag', 'vrf_rt'): + # dict sql expression + dse = self._string_to_dictsql(part) + self._logger.debug('string part: %s => %s' % (part, 
dse)) else: raise ParserError("Unhandled part in AST: %s" % part) - print "DSE:", dse + + if dss['val1'] is None: + self._logger.debug('val1 not set, using dse: %s' % str(dse)) + dss['val1'] = dse + else: + self._logger.debug("val1 is set, operator is '%s', val2 = dst: %s" % (dss['operator'], str(dse))) + dss['val2'] = dse if lookahead is not None: if dss['val1'] is not None and dss['val2'] is not None: - print "nesting!" dss = { 'operator': None, 'val1': dss, 'val2': None } - if dss['val1'] is None: - #self._logger.debug('val1 not set, using dse: %s' % str(dse)) - dss['val1'] = dse - else: - #self._logger.debug("val1 is set, operator is '%s', val2 = dst: %s" % (dss['operator'], str(dse))) - dss['val2'] = dse - - # special handling when AST is only one expression, then we overwrite # the dss with dse if len(ast) == 1: dss = dse if len(ast) == 0: - dss = self._string_to_dictsql('') - + dss = self._string_to_dictsql(ParseResults('', 'word')) # return the final composed stack of dictsql expressions return dss @@ -144,6 +218,9 @@ def _parse_expr(self, part): self._logger.debug("parsing expression: " + str(part)) key, op, val = part + if key not in self.attributes: + raise NotImplementedError() + dictsql = { 'operator': op, 'val1': key, @@ -161,86 +238,420 @@ def _add_implicit_ops(self, input_ast): res_ast = [] for token, lookahead in izip_longest(input_ast, input_ast[1:]): - if isinstance(token, str) and token.lower() in self.boolean_operators: - res_ast.append(token) + if token.getName() == "boolean": + # only add boolean operator if it is NOT the last token + if lookahead is not None: + res_ast.append(token) continue - if isinstance(lookahead, str) and lookahead.lower() in self.boolean_operators: + else: + # add non-boolean token res_ast.append(token) - continue - res_ast.append(token) - if lookahead is not None: - res_ast.append('and') + # if next token is boolean, continue so it can be added + if lookahead is None or lookahead.getName() == "boolean": + continue + # 
if next token is NOT a boolean, add implicit AND + res_ast.append(ParseResults('and', 'boolean')) return res_ast def parse(self, input_string): + # check for unclosed quotes/parentheses + paired_exprs = nestedExpr('(', ')') | quotedString + stripped_line = paired_exprs.suppress().transformString(input_string) + if '"' in stripped_line: + raise NipapValueError('Unclosed quote') + raw_ast = self._string_to_ast(input_string) ast = self._add_implicit_ops(raw_ast) return self._ast_to_dictsql(ast) -class VrfSmartParser(SmartParser): - columns = { - 'rt', - 'name', - 'description', - 'tags' - } + class PoolSmartParser(SmartParser): + attributes = { + 'name': True, + 'description': True + } - def _string_to_dictsql(self, string): + def _string_to_dictsql(self, part): """ Do magic matching of single words or quoted string """ - self._logger.debug("parsing string: " + str(string)) - #self._logger.debug("Query part '" + string + "' interpreted as text") + self._logger.debug("parsing string: " + str(part[0]) + " of type: " + part.getName()) - if re.match('#', string): - self._logger.debug("Query part '" + string + "' interpreted as tag") + if part.getName() == 'tag': + self._logger.debug("Query part '" + part[0][0] + "' interpreted as tag") dictsql = { 'interpretation': { - 'string': string, - 'interpretation': '(inherited) tag', + 'string': part[0][0], + 'interpretation': 'tag', 'attribute': 'tag', 'operator': 'equals_any', }, + 'operator': 'equals_any', + 'val1': 'tags', + 'val2': part[0]['word'] + } + + elif part.getName() == 'vrf_rt': + self._logger.debug("Query part '" + part.vrf_rt[0] + "' interpreted as VRF RT") + # TODO: enable this, our fancy new interpretation + dictsql = { + 'interpretation': { + 'attribute': 'VRF RT', + 'interpretation': 'vrf_rt', + 'operator': 'equals', + 'string': part.vrf_rt[0] + }, + 'operator': 'equals', + 'val1': 'vrf_rt', + 'val2': part.vrf_rt[0] + } + # using old interpretation for the time being to make sure we align + # with old smart 
search interpreter + dictsql = { + 'interpretation': { + 'attribute': 'name or description', + 'interpretation': 'text', + 'operator': 'regex', + 'string': part.vrf_rt[0] + }, 'operator': 'or', 'val1': { - 'operator': 'equals_any', - 'val1': 'tags', - 'val2': string[1:] + 'operator': 'regex_match', + 'val1': 'name', + 'val2': part.vrf_rt[0] }, 'val2': { - 'operator': 'equals_any', - 'val1': 'inherited_tags', - 'val2': string[1:] + 'operator': 'regex_match', + 'val1': 'description', + 'val2': part.vrf_rt[0] } } else: - self._logger.debug("Query part '" + string + "' interpreted as text") + self._logger.debug("Query part '" + part[0] + "' interpreted as text") + dictsql = { + 'interpretation': { + 'attribute': 'name or description', + 'interpretation': 'text', + 'operator': 'regex', + 'string': part[0] + }, + 'operator': 'or', + 'val1': { + 'operator': 'regex_match', + 'val1': 'name', + 'val2': part[0] + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'description', + 'val2': part[0] + } + } + + return dictsql + + + +class PrefixSmartParser(SmartParser): + attributes = {} + + def _string_to_dictsql(self, part): + """ Do magic matching of single words or quoted string + """ + self._logger.debug("parsing string: " + str(part[0]) + " of type: " + part.getName()) + + if part.getName() == 'tag': + self._logger.debug("Query part '" + part[0][0] + "' interpreted as tag") + dictsql = { + 'interpretation': { + 'string': part[0][0], + 'interpretation': 'tag', + 'attribute': 'tag', + 'operator': 'equals_any', + }, + 'operator': 'equals_any', + 'val1': 'tags', + 'val2': part[0]['word'] + } + + elif part.getName() == 'ipv4_address': + dictsql = {} + elif part.getName() == 'vrf_rt': + self._logger.debug("Query part '" + part.vrf_rt[0] + "' interpreted as VRF RT") + # TODO: enable this, our fancy new interpretation + dictsql = { + 'interpretation': { + 'attribute': 'VRF RT', + 'interpretation': 'vrf_rt', + 'operator': 'equals', + 'string': part.vrf_rt[0] + }, + 'operator': 
'equals', + 'val1': 'vrf_rt', + 'val2': part.vrf_rt[0] + } + # using old interpretation for the time being to make sure we align + # with old smart search interpreter dictsql = { 'interpretation': { 'attribute': 'name or description', 'interpretation': 'text', 'operator': 'regex', - 'string': string + 'string': part.vrf_rt[0] }, 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'name', - 'val2': string + 'val2': part.vrf_rt[0] }, 'val2': { 'operator': 'regex_match', 'val1': 'description', - 'val2': string + 'val2': part.vrf_rt[0] + } + } + + else: + # since it's difficult to parse IP addresses using pyparsing we do a + # bit of good ol parsing here + + if self._get_afi(part[0]) == 4 and len(part[0].split('/')) == 2: + self._logger.debug("Query part '" + part[0] + "' interpreted as prefix") + address, prefix_length = part[0].split('/') + + # complete a prefix to it's fully expanded form + # 10/8 will be expanded into 10.0.0.0/8 which PostgreSQL can + # parse correctly + while len(address.split('.')) < 4: + address += '.0' + + prefix = address + '/' + prefix_length + strict_prefix = str(IPy.IP(part[0], make_net = True)) + + interp = { + 'string': part[0], + 'interpretation': 'IPv4 prefix', + 'attribute': 'prefix', + 'operator': 'contained_within_equals', + } + + if prefix != part[0]: + interp['expanded'] = prefix + + if prefix != strict_prefix: + interp['strict_prefix'] = strict_prefix + + dictsql = { + 'interpretation': interp, + 'operator': 'contained_within_equals', + 'val1': 'prefix', + 'val2': strict_prefix + } + + # IPv4 address + # split on dot to make sure we have all four octets before we do a + # search + elif self._get_afi(part[0]) == 4 and len(part[0].split('.')) == 4: + self._logger.debug("Query part '" + part[0] + "' interpreted as prefix") + dictsql = { + 'interpretation': { + 'string': part[0], + 'interpretation': 'IPv4 address', + 'attribute': 'prefix', + 'operator': 'contains_equals', + }, + 'operator': 'contains_equals', + 'val1': 
'prefix', + 'val2': part[0] + } + + # IPv6 prefix + elif self._get_afi(part[0]) == 6 and len(part[0].split('/')) == 2: + self._logger.debug("Query part '" + part[0] + "' interpreted as IPv6 prefix") + strict_prefix = str(IPy.IP(part[0], make_net = True)) + interp = { + 'string': part[0], + 'interpretation': 'IPv6 prefix', + 'attribute': 'prefix', + 'operator': 'contained_within_equals' + } + if part[0] != strict_prefix: + interp['strict_prefix'] = strict_prefix + + dictsql = { + 'interpretation': interp, + 'operator': 'contained_within_equals', + 'val1': 'prefix', + 'val2': strict_prefix + } + + # IPv6 address + elif self._get_afi(part[0]) == 6: + self._logger.debug("Query part '" + part[0] + "' interpreted as IPv6 address") + dictsql = { + 'interpretation': { + 'string': part[0], + 'interpretation': 'IPv6 address', + 'attribute': 'prefix', + 'operator': 'contains_equals', + }, + 'operator': 'contains_equals', + 'val1': 'prefix', + 'val2': part[0] + } + else: + # Description or comment + self._logger.debug("Query part '" + part[0] + "' interpreted as text") + dictsql = { + 'interpretation': { + 'string': part[0], + 'interpretation': 'text', + 'attribute': 'description or comment or node or order_id or customer_id', + 'operator': 'regex', + }, + 'operator': 'or', + 'val1': { + 'operator': 'or', + 'val1': { + 'operator': 'or', + 'val1': { + 'operator': 'or', + 'val1': { + 'operator': 'regex_match', + 'val1': 'comment', + 'val2': part[0] + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'description', + 'val2': part[0] + } + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'node', + 'val2': part[0] + } + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'order_id', + 'val2': part[0] + }, + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'customer_id', + 'val2': part[0] + } + } + + return dictsql + + + +class VrfSmartParser(SmartParser): + attributes = {} + + def _string_to_dictsql(self, part): + """ Do magic matching of single words or quoted 
string + """ + self._logger.debug("parsing string: " + str(part[0]) + " of type: " + part.getName()) + + if part.getName() == 'tag': + self._logger.debug("Query part '" + part[0][0] + "' interpreted as tag") + dictsql = { + 'interpretation': { + 'string': part[0][0], + 'interpretation': 'tag', + 'attribute': 'tag', + 'operator': 'equals_any', + }, + 'operator': 'equals_any', + 'val1': 'tags', + 'val2': part[0]['word'] + } + + elif part.getName() == 'vrf_rt': + self._logger.debug("Query part '" + part.vrf_rt[0] + "' interpreted as VRF RT") + # TODO: enable this, our fancy new interpretation + dictsql = { + 'interpretation': { + 'attribute': 'VRF RT', + 'interpretation': 'vrf_rt', + 'operator': 'equals', + 'string': part.vrf_rt[0] + }, + 'operator': 'equals', + 'val1': 'vrf_rt', + 'val2': part.vrf_rt[0] + } + # using old interpretation for the time being to make sure we align + # with old smart search interpreter + dictsql = { + 'interpretation': { + 'string': part.vrf_rt[0], + 'interpretation': 'text', + 'attribute': 'vrf or name or description', + 'operator': 'regex', + }, + 'operator': 'or', + 'val1': { + 'operator': 'or', + 'val1': { + 'operator': 'regex_match', + 'val1': 'name', + 'val2': part.vrf_rt[0] + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'description', + 'val2': part.vrf_rt[0] } + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'rt', + 'val2': part.vrf_rt[0] } + } + + else: + self._logger.debug("Query part '" + part[0] + "' interpreted as text") + dictsql = { + 'interpretation': { + 'string': part[0], + 'interpretation': 'text', + 'attribute': 'vrf or name or description', + 'operator': 'regex', + }, + 'operator': 'or', + 'val1': { + 'operator': 'or', + 'val1': { + 'operator': 'regex_match', + 'val1': 'name', + 'val2': part[0] + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'description', + 'val2': part[0] + } + }, + 'val2': { + 'operator': 'regex_match', + 'val1': 'rt', + 'val2': part[0] + } + } return dictsql @@ -250,6 +661,8 
@@ class ParserError(Exception): """ General parser error """ + + if __name__ == '__main__': # set logging format LOG_FORMAT = "%(asctime)s: %(module)-10s %(levelname)-8s %(message)s" @@ -258,7 +671,7 @@ class ParserError(Exception): logger = logging.getLogger() logger.setLevel(logging.DEBUG) - p = SmartParser() + p = PoolSmartParser() #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar")') #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar"))') import sys diff --git a/tests/nipaptest.py b/tests/nipaptest.py index db38a9761..a1b85dc57 100755 --- a/tests/nipaptest.py +++ b/tests/nipaptest.py @@ -1943,7 +1943,7 @@ def test_prefix3(self): def test_prefix4(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() - with self.assertRaisesRegexp(nipap.backend.NipapValueError, 'No closing quotation'): + with self.assertRaisesRegexp(nipap.backend.NipapValueError, 'Unclosed quote'): query = n._parse_prefix_query('"') @@ -2042,8 +2042,8 @@ def test_vrf3(self): def test_vrf4(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() - with self.assertRaisesRegexp(nipap.backend.NipapValueError, 'No closing quotation'): - query, interp = n._parse_vrf_query('"') + with self.assertRaisesRegexp(nipap.backend.NipapValueError, 'Unclosed quote'): + query = n._parse_vrf_query('"') @@ -2166,7 +2166,7 @@ def test_pool2(self): 'val2': u'123:456' } } - self.assertEqual(query, exp_query) + self.assertEqual(exp_query, query) @@ -2179,29 +2179,29 @@ def test_pool3(self): 'attribute': 'tag', 'interpretation': 'tag', 'operator': 'equals_any', - 'string': u'#bar' + 'string': '#bar' }, 'operator': 'equals_any', 'val1': 'tags', - 'val2': u'bar' + 'val2': 'bar' } - self.assertEqual(query, exp_query) + self.assertEqual(exp_query, query) def test_pool4(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() - with self.assertRaisesRegexp(nipap.backend.NipapValueError, 'No closing quotation'): - query, interp 
= n._parse_pool_query('"') + with self.assertRaisesRegexp(nipap.backend.NipapValueError, 'Unclosed quote'): + query = n._parse_pool_query('"') def test_pool5(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() - query = n._parse_pool_query('foo bar') + query = n._parse_pool_query('#foo and bar') exp_query = { 'interpretation': { 'interpretation': 'and', @@ -2210,22 +2210,14 @@ def test_pool5(self): 'operator': 'and', 'val1': { 'interpretation': { - 'attribute': 'name or description', - 'interpretation': 'text', - 'operator': 'regex', - 'string': u'foo' - }, - 'operator': 'or', - 'val1': { - 'operator': 'regex_match', - 'val1': 'name', - 'val2': u'foo' + 'attribute': 'tag', + 'interpretation': 'tag', + 'operator': 'equals_any', + 'string': '#foo' }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'description', - 'val2': u'foo' - } + 'operator': 'equals_any', + 'val1': 'tags', + 'val2': 'foo' }, 'val2': { 'interpretation': { From fe8b711e641b8b73b4af8a422b64d9993859572f Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Tue, 14 Jul 2015 21:46:05 +0200 Subject: [PATCH 07/14] Remove old smart parsing logic Since this is now replaced by the dedicated smart parsing module. 
--- nipap/nipap/backend.py | 320 ----------------------------------- nipap/nipap/smart_parsing.py | 3 + 2 files changed, 3 insertions(+), 320 deletions(-) diff --git a/nipap/nipap/backend.py b/nipap/nipap/backend.py index 67f383b28..b6d023a11 100644 --- a/nipap/nipap/backend.py +++ b/nipap/nipap/backend.py @@ -1598,78 +1598,6 @@ def _parse_vrf_query(self, query_str): query = sp.parse(query_str) return query - # find query parts - query_str_parts = self._get_query_parts(query_str) - - # go through parts and add to query_parts list - query_parts = list() - for query_str_part in query_str_parts: - - # tags - if re.match('#', query_str_part['string']): - self._logger.debug("Query part '" + query_str_part['string'] + "' interpreted as tag") - query_parts.append({ - 'interpretation': { - 'string': query_str_part['string'], - 'interpretation': 'tag', - 'attribute': 'tag', - 'operator': 'equals_any', - }, - 'operator': 'equals_any', - 'val1': 'tags', - 'val2': query_str_part['string'][1:] - }) - - else: - self._logger.debug("Query part '" + query_str_part['string'] + "' interpreted as text") - query_parts.append({ - 'interpretation': { - 'string': query_str_part['string'], - 'interpretation': 'text', - 'attribute': 'vrf or name or description', - 'operator': 'regex', - }, - 'operator': 'or', - 'val1': { - 'operator': 'or', - 'val1': { - 'operator': 'regex_match', - 'val1': 'name', - 'val2': query_str_part['string'] - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'description', - 'val2': query_str_part['string'] - } - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'rt', - 'val2': query_str_part['string'] - } - }) - - # Sum all query parts to one query - query = {} - if len(query_parts) > 0: - query = query_parts[0] - - if len(query_parts) > 1: - query = query_parts[-1] - for query_part in reversed(query_parts[:-1]): - query = { - 'interpretation': { - 'interpretation': 'and', - 'operator': 'and', - }, - 'operator': 'and', - 'val1': query_part, - 'val2': query 
- } - - return query - # @@ -2257,70 +2185,6 @@ def _parse_pool_query(self, query_str): query = sp.parse(query_str) return query - # find query parts - query_str_parts = self._get_query_parts(query_str) - - # go through parts and add to query_parts list - query_parts = list() - for query_str_part in query_str_parts: - - # tags - if re.match('#', query_str_part['string']): - self._logger.debug("Query part '" + query_str_part['string'] + "' interpreted as tag") - query_parts.append({ - 'interpretation': { - 'string': query_str_part['string'], - 'interpretation': 'tag', - 'attribute': 'tag', - 'operator': 'equals_any', - }, - 'operator': 'equals_any', - 'val1': 'tags', - 'val2': query_str_part['string'][1:] - }) - - else: - self._logger.debug("Query part '" + query_str_part['string'] + "' interpreted as text") - query_parts.append({ - 'interpretation': { - 'string': query_str_part['string'], - 'interpretation': 'text', - 'attribute': 'name or description', - 'operator': 'regex', - }, - 'operator': 'or', - 'val1': { - 'operator': 'regex_match', - 'val1': 'name', - 'val2': query_str_part['string'] - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'description', - 'val2': query_str_part['string'] - } - }) - - # Sum all query parts to one query - query = {} - if len(query_parts) > 0: - query = query_parts[0] - - if len(query_parts) > 1: - query = query_parts[-1] - for query_part in reversed(query_parts[:-1]): - query = { - 'interpretation': { - 'interpretation': 'and', - 'operator': 'and', - }, - 'operator': 'and', - 'val1': query_part, - 'val2': query - } - - return query - # @@ -3596,190 +3460,6 @@ def _parse_prefix_query(self, query_str): query = sp.parse(query_str) return query - # find query parts - query_str_parts = self._get_query_parts(query_str) - - # go through parts and add to query_parts list - query_parts = list() - for query_str_part in query_str_parts: - - # tags - if re.match('#', query_str_part['string']): - self._logger.debug("Query part '" + 
query_str_part['string'] + "' interpreted as tag") - query_parts.append({ - 'interpretation': { - 'string': query_str_part['string'], - 'interpretation': '(inherited) tag', - 'attribute': 'tag', - 'operator': 'equals_any', - }, - 'operator': 'or', - 'val1': { - 'operator': 'equals_any', - 'val1': 'tags', - 'val2': query_str_part['string'][1:] - }, - 'val2': { - 'operator': 'equals_any', - 'val1': 'inherited_tags', - 'val2': query_str_part['string'][1:] - } - }) - - # IPv4 prefix - elif self._get_afi(query_str_part['string']) == 4 and len(query_str_part['string'].split('/')) == 2: - self._logger.debug("Query part '" + query_str_part['string'] + "' interpreted as prefix") - address, prefix_length = query_str_part['string'].split('/') - - # complete a prefix to it's fully expanded form - # 10/8 will be expanded into 10.0.0.0/8 which PostgreSQL can - # parse correctly - while len(address.split('.')) < 4: - address += '.0' - - prefix = address + '/' + prefix_length - strict_prefix = str(IPy.IP(query_str_part['string'], make_net = True)) - - interp = { - 'string': query_str_part['string'], - 'interpretation': 'IPv4 prefix', - 'attribute': 'prefix', - 'operator': 'contained_within_equals', - } - - if prefix != query_str_part['string']: - interp['expanded'] = prefix - - if prefix != strict_prefix: - interp['strict_prefix'] = strict_prefix - - query_parts.append({ - 'interpretation': interp, - 'operator': 'contained_within_equals', - 'val1': 'prefix', - 'val2': strict_prefix - }) - - # IPv4 address - # split on dot to make sure we have all four octets before we do a - # search - elif self._get_afi(query_str_part['string']) == 4 and len(query_str_part['string'].split('.')) == 4: - self._logger.debug("Query part '" + query_str_part['string'] + "' interpreted as prefix") - query_parts.append({ - 'interpretation': { - 'string': query_str_part['string'], - 'interpretation': 'IPv4 address', - 'attribute': 'prefix', - 'operator': 'contains_equals', - }, - 'operator': 
'contains_equals', - 'val1': 'prefix', - 'val2': query_str_part['string'] - }) - - # IPv6 prefix - elif self._get_afi(query_str_part['string']) == 6 and len(query_str_part['string'].split('/')) == 2: - self._logger.debug("Query part '" + query_str_part['string'] + "' interpreted as IPv6 prefix") - strict_prefix = str(IPy.IP(query_str_part['string'], make_net = True)) - interp = { - 'string': query_str_part['string'], - 'interpretation': 'IPv6 prefix', - 'attribute': 'prefix', - 'operator': 'contained_within_equals' - } - if query_str_part['string'] != strict_prefix: - interp['strict_prefix'] = strict_prefix - - query_parts.append({ - 'interpretation': interp, - 'operator': 'contained_within_equals', - 'val1': 'prefix', - 'val2': strict_prefix - }) - - # IPv6 address - elif self._get_afi(query_str_part['string']) == 6: - self._logger.debug("Query part '" + query_str_part['string'] + "' interpreted as IPv6 address") - query_parts.append({ - 'interpretation': { - 'string': query_str_part['string'], - 'interpretation': 'IPv6 address', - 'attribute': 'prefix', - 'operator': 'contains_equals', - }, - 'operator': 'contains_equals', - 'val1': 'prefix', - 'val2': query_str_part['string'] - }) - - # Description or comment - # TODO: add an equal search for VRF here - else: - self._logger.debug("Query part '" + query_str_part['string'] + "' interpreted as desc/comment") - query_parts.append({ - 'interpretation': { - 'string': query_str_part['string'], - 'interpretation': 'text', - 'attribute': 'description or comment or node or order_id or customer_id', - 'operator': 'regex', - }, - 'operator': 'or', - 'val1': { - 'operator': 'or', - 'val1': { - 'operator': 'or', - 'val1': { - 'operator': 'or', - 'val1': { - 'operator': 'regex_match', - 'val1': 'comment', - 'val2': query_str_part['string'] - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'description', - 'val2': query_str_part['string'] - } - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'node', - 'val2': 
query_str_part['string'] - } - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'order_id', - 'val2': query_str_part['string'] - }, - }, - 'val2': { - 'operator': 'regex_match', - 'val1': 'customer_id', - 'val2': query_str_part['string'] - } - }) - - # Sum all query parts to one query - query = {} - if len(query_parts) > 0: - query = query_parts[0] - - if len(query_parts) > 1: - query = query_parts[-1] - for query_part in reversed(query_parts[:-1]): - query = { - 'interpretation': { - 'interpretation': 'and', - 'operator': 'and', - }, - 'operator': 'and', - 'val1': query_part, - 'val2': query - } - - return query - # diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py index d64716413..01c7bf26f 100644 --- a/nipap/nipap/smart_parsing.py +++ b/nipap/nipap/smart_parsing.py @@ -262,6 +262,9 @@ def parse(self, input_string): stripped_line = paired_exprs.suppress().transformString(input_string) if '"' in stripped_line: raise NipapValueError('Unclosed quote') + # TODO: add test case for this + if '(' in stripped_line or ')' in stripped_line: + raise NipapValueError('Unclosed parentheses') raw_ast = self._string_to_ast(input_string) ast = self._add_implicit_ops(raw_ast) From 7661b43539879b4d1055e9d07b6f106aff1b6ef3 Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Tue, 14 Jul 2015 22:25:51 +0200 Subject: [PATCH 08/14] smart-parse: add attributes for all objects This adds the list of attributes for Pools, Prefixes and VRFs. Also updates the test suite with an extra test to search using an "expression", ie attribute *operator* value. 
--- nipap/nipap/smart_parsing.py | 68 +++++++++++++++++++++++++++++++++--- tests/nipaptest.py | 46 ++++++++++++++++++++++++ 2 files changed, 110 insertions(+), 4 deletions(-) diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py index 01c7bf26f..d347606d0 100644 --- a/nipap/nipap/smart_parsing.py +++ b/nipap/nipap/smart_parsing.py @@ -276,10 +276,29 @@ def parse(self, input_string): class PoolSmartParser(SmartParser): attributes = { + 'default_type': True, + 'description': True, + 'free_addresses_v4': True, + 'free_addresses_v6': True, + 'free_prefixes_v4': True, + 'free_prefixes_v6': True, + 'ipv4_default_prefix_length': True, + 'ipv6_default_prefix_length': True, + 'member_prefixes_v4': True, + 'member_prefixes_v6': True, 'name': True, - 'description': True + 'total_addresses_v4': True, + 'total_addresses_v6': True, + 'total_prefixes_v4': True, + 'total_prefixes_v6': True, + 'used_addresses_v4': True, + 'used_addresses_v6': True, + 'used_prefixes_v4': True, + 'used_prefixes_v6': True, + 'vrf': True, } + def _string_to_dictsql(self, part): """ Do magic matching of single words or quoted string """ @@ -362,7 +381,36 @@ def _string_to_dictsql(self, part): class PrefixSmartParser(SmartParser): - attributes = {} + attributes = { + 'added': True, + 'alarm_priority': True, + 'authoritative_source': True, + 'children': True, + 'comment': True, + 'country': True, + 'customer_id': True, + 'description': True, + 'display': True, + 'display_prefix': True, + 'expires': True, + 'external_key': True, + 'family': True, + 'free_addreses': True, + 'indent': True, + 'last_modified': True, + 'match': True, + 'monitor': True, + 'node': True, + 'order_id': True, + 'pool': True, + 'prefix': True, + 'status': True, + 'total_addresses': True, + 'type': True, + 'used_addreses': True, + 'vlan': True, + 'vrf': True, + } def _string_to_dictsql(self, part): """ Do magic matching of single words or quoted string @@ -561,7 +609,19 @@ def _string_to_dictsql(self, part): class 
VrfSmartParser(SmartParser): - attributes = {} + attributes = { + 'description': True, + 'free_addresses_v4': True, + 'free_addresses_v6': True, + 'name': True, + 'num_prefixes_v4': True, + 'num_prefixes_v6': True, + 'rt': True, + 'total_addresses_v4': True, + 'total_addresses_v6': True, + 'used_addresses_v4': True, + 'used_addresses_v6': True, + } def _string_to_dictsql(self, part): """ Do magic matching of single words or quoted string @@ -674,7 +734,7 @@ class ParserError(Exception): logger = logging.getLogger() logger.setLevel(logging.DEBUG) - p = PoolSmartParser() + p = PrefixSmartParser() #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar")') #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar"))') import sys diff --git a/tests/nipaptest.py b/tests/nipaptest.py index a1b85dc57..53808fb21 100755 --- a/tests/nipaptest.py +++ b/tests/nipaptest.py @@ -1948,6 +1948,52 @@ def test_prefix4(self): + def test_prefix5(self): + cfg = NipapConfig('/etc/nipap/nipap.conf') + n = Nipap() + query = n._parse_prefix_query('foo-agg-1 vlan>100 vlan< 200') + exp_query = { + 'interpretation': {'interpretation': 'and', 'operator': 'and'}, + 'operator': 'and', + 'val1': {'interpretation': {'interpretation': 'and', 'operator': 'and'}, + 'operator': 'and', + 'val1': {'interpretation': {'attribute': 'description or comment or node or order_id or customer_id', + 'interpretation': 'text', + 'operator': 'regex', + 'string': 'foo-agg-1'}, + 'operator': 'or', + 'val1': {'operator': 'or', + 'val1': {'operator': 'or', + 'val1': {'operator': 'or', + 'val1': {'operator': 'regex_match', + 'val1': 'comment', + 'val2': 'foo-agg-1'}, + 'val2': {'operator': 'regex_match', + 'val1': 'description', + 'val2': 'foo-agg-1'}}, + 'val2': {'operator': 'regex_match', + 'val1': 'node', + 'val2': 'foo-agg-1'}}, + 'val2': {'operator': 'regex_match', + 'val1': 'order_id', + 'val2': 'foo-agg-1'}}, + 'val2': {'operator': 'regex_match', 
+ 'val1': 'customer_id', + 'val2': 'foo-agg-1'}}, + 'val2': {'interpretation': 'expression', + 'operator': '>', + 'val1': 'vlan', + 'val2': '100'}}, + 'val2': {'interpretation': 'expression', + 'operator': '<', + 'val1': 'vlan', + 'val2': '200'}} + + + self.assertEqual(query, exp_query) + + + def test_vrf1(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() From 0515938b06ed61cd7996a7a54ae043fbacdc00b1 Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Tue, 14 Jul 2015 23:01:57 +0200 Subject: [PATCH 09/14] smart-parse: improve parsing of expressions The way we build the groupings for pyparsing is reflected in the results. Setting the results name of a component that is part of another component yields ParseResults of different "depth" where we have to go through two lists to access the value we want. The real problem is that things like vrf_rt was built out of different components and so the result would come in at different depths compared to a 'word' or something else. Handling different depths is a PITA so we avoid that by not building components out of other named components. The interpretation of "expressions" has been improved too, partly by following the interpretation format used elsewhere and by including relevant information. Part of #531. 
--- nipap/nipap/smart_parsing.py | 54 +++++++++++++++++++++--------------- tests/nipaptest.py | 30 +++++++++++++++----- 2 files changed, 54 insertions(+), 30 deletions(-) diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py index d347606d0..6ffef875c 100644 --- a/nipap/nipap/smart_parsing.py +++ b/nipap/nipap/smart_parsing.py @@ -91,19 +91,22 @@ def _string_to_ast(self, input_string): """ # simple words - word = Word(alphanums + "-./").setResultsName('word') + comp_word = Word(alphanums + "-./_") + word = Word(alphanums + "-./_").setResultsName('word') # numbers + comp_number = Word(nums) number = Word(nums).setResultsName('number') # IPv4 address ipv4_oct = Regex("((2(5[0-5]|[0-4][0-9])|[01]?[0-9][0-9]?))") + comp_ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct*3)) ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct*3)).setResultsName('ipv4_address') # VRF RTs of the form number:number - vrf_rt = Combine((ipv4_address | number) + Literal(':') + number).setResultsName('vrf_rt') + vrf_rt = Combine((comp_ipv4_address | comp_number) + Literal(':') + comp_number).setResultsName('vrf_rt') # tags - tags = Combine( Literal('#') + word).setResultsName('tag') + tags = Combine( Literal('#') + comp_word).setResultsName('tag') # operators for matching match_op = oneOf(' '.join(self.match_operators)).setResultsName('operator') @@ -111,7 +114,7 @@ def _string_to_ast(self, input_string): # quoted string quoted_string = QuotedString('"', unquoteResults=True, escChar='\\').setResultsName('quoted_string') # expression to match a certain value for an attribute - expression = Group(word + match_op + (quoted_string | word | number)).setResultsName('expression') + expression = Group(word + match_op + (quoted_string | vrf_rt | word | number)).setResultsName('expression') # we work on atoms, which are single quoted strings, match expressions, # tags, VRF RT or simple words. # NOTE: Place them in order of most exact match first! 
@@ -224,8 +227,13 @@ def _parse_expr(self, part): dictsql = { 'operator': op, 'val1': key, - 'val2': val, - 'interpretation': 'expression' + 'val2': str(val), + 'interpretation': { + 'string': key + op + val, + 'interpretation': 'expression', + 'attribute': key, + 'operator': op + } } return dictsql @@ -308,29 +316,29 @@ def _string_to_dictsql(self, part): self._logger.debug("Query part '" + part[0][0] + "' interpreted as tag") dictsql = { 'interpretation': { - 'string': part[0][0], + 'string': part[0], 'interpretation': 'tag', 'attribute': 'tag', 'operator': 'equals_any', }, 'operator': 'equals_any', 'val1': 'tags', - 'val2': part[0]['word'] + 'val2': part[0][1:] } elif part.getName() == 'vrf_rt': - self._logger.debug("Query part '" + part.vrf_rt[0] + "' interpreted as VRF RT") + self._logger.debug("Query part '" + part.vrf_rt + "' interpreted as VRF RT") # TODO: enable this, our fancy new interpretation dictsql = { 'interpretation': { 'attribute': 'VRF RT', 'interpretation': 'vrf_rt', 'operator': 'equals', - 'string': part.vrf_rt[0] + 'string': part.vrf_rt }, 'operator': 'equals', 'val1': 'vrf_rt', - 'val2': part.vrf_rt[0] + 'val2': part.vrf_rt } # using old interpretation for the time being to make sure we align # with old smart search interpreter @@ -339,18 +347,18 @@ def _string_to_dictsql(self, part): 'attribute': 'name or description', 'interpretation': 'text', 'operator': 'regex', - 'string': part.vrf_rt[0] + 'string': part.vrf_rt }, 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'name', - 'val2': part.vrf_rt[0] + 'val2': part.vrf_rt }, 'val2': { 'operator': 'regex_match', 'val1': 'description', - 'val2': part.vrf_rt[0] + 'val2': part.vrf_rt } } @@ -632,35 +640,35 @@ def _string_to_dictsql(self, part): self._logger.debug("Query part '" + part[0][0] + "' interpreted as tag") dictsql = { 'interpretation': { - 'string': part[0][0], + 'string': part[0], 'interpretation': 'tag', 'attribute': 'tag', 'operator': 'equals_any', }, 'operator': 
'equals_any', 'val1': 'tags', - 'val2': part[0]['word'] + 'val2': part[0][1:] } elif part.getName() == 'vrf_rt': - self._logger.debug("Query part '" + part.vrf_rt[0] + "' interpreted as VRF RT") + self._logger.debug("Query part '" + part.vrf_rt + "' interpreted as VRF RT") # TODO: enable this, our fancy new interpretation dictsql = { 'interpretation': { 'attribute': 'VRF RT', 'interpretation': 'vrf_rt', 'operator': 'equals', - 'string': part.vrf_rt[0] + 'string': part.vrf_rt }, 'operator': 'equals', 'val1': 'vrf_rt', - 'val2': part.vrf_rt[0] + 'val2': part.vrf_rt } # using old interpretation for the time being to make sure we align # with old smart search interpreter dictsql = { 'interpretation': { - 'string': part.vrf_rt[0], + 'string': part.vrf_rt, 'interpretation': 'text', 'attribute': 'vrf or name or description', 'operator': 'regex', @@ -671,18 +679,18 @@ def _string_to_dictsql(self, part): 'val1': { 'operator': 'regex_match', 'val1': 'name', - 'val2': part.vrf_rt[0] + 'val2': part.vrf_rt }, 'val2': { 'operator': 'regex_match', 'val1': 'description', - 'val2': part.vrf_rt[0] + 'val2': part.vrf_rt } }, 'val2': { 'operator': 'regex_match', 'val1': 'rt', - 'val2': part.vrf_rt[0] + 'val2': part.vrf_rt } } diff --git a/tests/nipaptest.py b/tests/nipaptest.py index 53808fb21..925315b87 100755 --- a/tests/nipaptest.py +++ b/tests/nipaptest.py @@ -1980,14 +1980,30 @@ def test_prefix5(self): 'val2': {'operator': 'regex_match', 'val1': 'customer_id', 'val2': 'foo-agg-1'}}, - 'val2': {'interpretation': 'expression', - 'operator': '>', - 'val1': 'vlan', - 'val2': '100'}}, - 'val2': {'interpretation': 'expression', + 'val2': { + 'interpretation': { + 'interpretation': 'expression', + 'attribute': 'vlan', + 'operator': '>', + 'string': 'vlan>100' + }, + 'operator': '>', + 'val1': 'vlan', + 'val2': '100' + } + }, + 'val2': { + 'interpretation': { + 'interpretation': 'expression', + 'attribute': 'vlan', 'operator': '<', - 'val1': 'vlan', - 'val2': '200'}} + 'string': 
'vlan<200' + }, + 'operator': '<', + 'val1': 'vlan', + 'val2': '200' + } + } self.assertEqual(query, exp_query) From 3858132b12731fe7b549490fdea6ec80d61d340b Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Tue, 14 Jul 2015 23:29:58 +0200 Subject: [PATCH 10/14] Add pyparsing as dependency --- .travis.yml | 2 +- nipap/debian/control | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 954249e6c..93229e1c6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -74,7 +74,7 @@ install: - if [ "$INSTALL" == "apt" ]; then wget -O - https://spritelink.github.io/NIPAP/nipap.gpg.key | sudo apt-key add -; fi - if [ "$INSTALL" == "apt" ]; then sudo apt-get update -qq; fi # install dependencies for installing & running nipap - - if [ "$INSTALL" == "apt" ]; then sudo apt-get install -qq -y --force-yes python-pysqlite2 python-psycopg2 python-ipy python-docutils postgresql-9.1 postgresql-9.1-ip4r python-tornado python-flask python-flask-xml-rpc python-flask-compress python-parsedatetime python-tz python-dateutil python-psutil; fi + - if [ "$INSTALL" == "apt" ]; then sudo apt-get install -qq -y --force-yes python-pysqlite2 python-psycopg2 python-ipy python-docutils postgresql-9.1 postgresql-9.1-ip4r python-tornado python-flask python-flask-xml-rpc python-flask-compress python-parsedatetime python-tz python-dateutil python-psutil python-pyparsing; fi # if we are testing the upgrade, first install NIPAP packages from official repo - if [ "$INSTALL" == "apt" ] && [ "$UPGRADE" == "true" ]; then sudo apt-get install -qq nipapd nipap-www nipap-cli; fi # bump version so that we know we are upgrading beyond what is installed diff --git a/nipap/debian/control b/nipap/debian/control index 2e3cc6ba7..f9403dfeb 100644 --- a/nipap/debian/control +++ b/nipap/debian/control @@ -17,7 +17,7 @@ Description: Neat IP Address Planner Package: nipapd Architecture: all -Depends: debconf, nipap-common, python (>= 2.7), ${misc:Depends}, python-psycopg2, 
postgresql (>=9.1) | postgresql-9.1 | postgresql-9.2 | postgresql-9.3 | postgresql-9.4, postgresql-9.1-ip4r (>= 2.0) | postgresql-9.3-ip4r (>= 2.0) | postgresql-9.4-ip4r (>= 2.0), postgresql-contrib | postgresql-contrib-9.3, python-flask, python-flask-xml-rpc, python-flask-compress, python-tornado, python-parsedatetime, python-tz, python-dateutil, python-psutil +Depends: debconf, nipap-common, python (>= 2.7), ${misc:Depends}, python-psycopg2, postgresql (>=9.1) | postgresql-9.1 | postgresql-9.2 | postgresql-9.3 | postgresql-9.4, postgresql-9.1-ip4r (>= 2.0) | postgresql-9.3-ip4r (>= 2.0) | postgresql-9.4-ip4r (>= 2.0), postgresql-contrib | postgresql-contrib-9.3, python-flask, python-flask-xml-rpc, python-flask-compress, python-tornado, python-parsedatetime, python-tz, python-dateutil, python-psutil, python-pyparsing Description: Neat IP Address Planner XML-RPC daemon The Neat IP Address Planner, NIPAP, is a system built for efficiently managing large amounts of IP addresses. This is the XML-RPC daemon. From 5b0bc9ca3605377b1ddaf48bfb32591273824648 Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Tue, 14 Jul 2015 23:32:59 +0200 Subject: [PATCH 11/14] Add pyparsing to requirements.txt --- nipap/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/nipap/requirements.txt b/nipap/requirements.txt index edb0e360b..8be5de412 100644 --- a/nipap/requirements.txt +++ b/nipap/requirements.txt @@ -11,6 +11,7 @@ itsdangerous==0.24 parsedatetime==1.5 psutil==1.2.1 # rq.filter: >= 1.0,<2.0 psycopg2==2.6.1 +pyparsing==2.0.3 python-dateutil==2.4.2 # optional dependency on ldap #python-ldap==2.4.19 From 33a3cc99f547cd4f4b25619eb49f1f6afedb613d Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Wed, 15 Jul 2015 00:18:34 +0200 Subject: [PATCH 12/14] smart-parse: support nested queries This now supports nested parentheses for putting together more complex queries. Also did slight refactoring. Part of #531. 
--- nipap/nipap/smart_parsing.py | 68 +++++++++++++++--------------------- 1 file changed, 29 insertions(+), 39 deletions(-) diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py index 6ffef875c..6f795bbde 100644 --- a/nipap/nipap/smart_parsing.py +++ b/nipap/nipap/smart_parsing.py @@ -122,11 +122,9 @@ def _string_to_ast(self, input_string): enclosed = Forward() parens = nestedExpr('(', ')', content=enclosed) -# enclosed << ( -# atom | parens -# ) -# TODO: enable above, we skip parentheses for now - enclosed << ( atom ) + enclosed << ( + parens | atom + ).setResultsName('nested') content = Forward() content << ( @@ -137,13 +135,27 @@ def _string_to_ast(self, input_string): return res - def _ast_to_dictsql(self, ast): + def _ast_to_dictsql(self, input_ast): """ """ - #self._logger.debug("parsing AST: " + str(ast)) - interp = [] + # Add implicit AND operator between expressions if there is no explicit + # operator specified. + ast = [] + for token, lookahead in izip_longest(input_ast, input_ast[1:]): + if token.getName() == "boolean": + # only add boolean operator if it is NOT the last token + if lookahead is not None: + ast.append(token) + continue + else: + # add non-boolean token + ast.append(token) + # if next token is boolean, continue so it can be added + if lookahead is None or lookahead.getName() == "boolean": + continue + # if next token is NOT a boolean, add implicit AND + ast.append(ParseResults('and', 'boolean')) - dse = None # dictSql stack dss = { @@ -152,8 +164,9 @@ def _ast_to_dictsql(self, ast): 'val2': None } + dse = None for part, lookahead in izip_longest(ast, ast[1:]): - self._logger.debug("part: %s %s" % (part, type(part))) + self._logger.debug("part: %s %s" % (part, part.getName())) # handle operators joining together expressions if part.getName() == 'boolean': @@ -171,13 +184,15 @@ def _ast_to_dictsql(self, ast): dse = self._parse_expr(part) else: dse = self._ast_to_dictsql(part) + elif part.getName() == 'nested': + dse = 
self._ast_to_dictsql(part) elif part.getName() in ('word', 'tag', 'vrf_rt'): # dict sql expression dse = self._string_to_dictsql(part) self._logger.debug('string part: %s => %s' % (part, dse)) else: - raise ParserError("Unhandled part in AST: %s" % part) - + raise ParserError("Unhandled part in AST: %s %s" % (part, + part.getName())) if dss['val1'] is None: self._logger.debug('val1 not set, using dse: %s' % str(dse)) @@ -239,30 +254,6 @@ def _parse_expr(self, part): return dictsql - def _add_implicit_ops(self, input_ast): - """ Add implicit AND operator between expressions if there is no - explicit operator specified. - """ - res_ast = [] - - for token, lookahead in izip_longest(input_ast, input_ast[1:]): - if token.getName() == "boolean": - # only add boolean operator if it is NOT the last token - if lookahead is not None: - res_ast.append(token) - continue - else: - # add non-boolean token - res_ast.append(token) - # if next token is boolean, continue so it can be added - if lookahead is None or lookahead.getName() == "boolean": - continue - # if next token is NOT a boolean, add implicit AND - res_ast.append(ParseResults('and', 'boolean')) - - return res_ast - - def parse(self, input_string): # check for unclosed quotes/parentheses @@ -274,8 +265,7 @@ def parse(self, input_string): if '(' in stripped_line or ')' in stripped_line: raise NipapValueError('Unclosed parentheses') - raw_ast = self._string_to_ast(input_string) - ast = self._add_implicit_ops(raw_ast) + ast = self._string_to_ast(input_string) return self._ast_to_dictsql(ast) @@ -742,7 +732,7 @@ class ParserError(Exception): logger = logging.getLogger() logger.setLevel(logging.DEBUG) - p = PrefixSmartParser() + p = VrfSmartParser() #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar")') #dictsql, interpretation = p.parse('core (country=SE or country = NL OR (damp AND "foo bar"))') import sys From 9a29e0238460b9298cc13b63b2448d26fa9fe9ea Mon Sep 17 00:00:00 2001 From: 
Kristian Larsson Date: Wed, 15 Jul 2015 08:43:28 +0200 Subject: [PATCH 13/14] cli: consider 'or' as boolean operator too! The interpretation format now matches both AND and OR as boolean operators. After the switch to the new smart parser the output of --show-interpretation looks quite messed up. I think this is because the interpretation dict is structured the other way, like reversed, compared to how it looked previously. The render function needs to be updated further. Part of #531. --- nipap-cli/nipap_cli/nipap_cli.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nipap-cli/nipap_cli/nipap_cli.py b/nipap-cli/nipap_cli/nipap_cli.py index 43e9f63cb..026b41047 100755 --- a/nipap-cli/nipap_cli/nipap_cli.py +++ b/nipap-cli/nipap_cli/nipap_cli.py @@ -169,7 +169,7 @@ def _parse_interp_pool(query, indent=-5, pandop=False): text = None text2 = None andop = False - if query['operator'] == 'and': + if interp['operator'] in ['and', 'or']: andop = True elif interp['interpretation'] == 'unclosed quote': text = "%s: %s, please close quote!" % (interp['string'], interp['interpretation']) @@ -266,7 +266,7 @@ def _parse_interp_vrf(query, indent=-5, pandop=False): text = None text2 = None andop = False - if query['operator'] == 'and': + if interp['operator'] in ['and', 'or']: andop = True elif interp['interpretation'] == 'unclosed quote': text = "%s: %s, please close quote!" % (interp['string'], interp['interpretation']) @@ -337,7 +337,7 @@ def _parse_interp_prefix(query, indent=-5, pandop=False): text = None text2 = None andop = False - if query['operator'] == 'and': + if interp['operator'] in ['and', 'or']: andop = True elif interp['interpretation'] == 'unclosed quote': text = "%s: %s, please close quote!" 
% (interp['string'], interp['interpretation']) From cb8fc3ce1f5c65b4a3ac81b68fff865f0c4e5b7a Mon Sep 17 00:00:00 2001 From: Kristian Larsson Date: Sun, 26 Jul 2015 23:35:53 +0200 Subject: [PATCH 14/14] Raise "NotImplementedError" not "NotImplemented" --- nipap/nipap/smart_parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipap/nipap/smart_parsing.py b/nipap/nipap/smart_parsing.py index 6f795bbde..db1a3be3d 100644 --- a/nipap/nipap/smart_parsing.py +++ b/nipap/nipap/smart_parsing.py @@ -223,7 +223,7 @@ def _ast_to_dictsql(self, input_ast): def _string_to_dictsql(self, string): """ Do magic matching of single words or quoted string """ - raise NotImplemented() + raise NotImplementedError() def _parse_expr(self, part):