diff --git a/netdiff/base.py b/netdiff/base.py
index 01a0de5..a4928b1 100755
--- a/netdiff/base.py
+++ b/netdiff/base.py
@@ -14,13 +14,14 @@ def __init__(self, old, new):
         self.old_graph = self._parse(old)
         self.new_graph = self._parse(new)
 
-    def diff(self):
+    def diff(self, cost=False):
         """
         Returns netdiff in a python dictionary
         """
+
         return {
-            "added": self._make_diff(self.new_graph, self.old_graph),
-            "removed": self._make_diff(self.old_graph, self.new_graph)
+            "added": self._make_diff(self.new_graph, self.old_graph, cost),
+            "removed": self._make_diff(self.old_graph, self.new_graph, cost)
         }
 
     def diff_json(self, **kwargs):
@@ -34,22 +35,35 @@ def diff_json(self, **kwargs):
     def _parse(self):
         raise NotImplementedError()
 
-    def _make_diff(self, old, new):
+    def _make_diff(self, old, new, cost):
         """
         calculates differences between topologies 'old' and 'new'
+        if cost is False, the link metric is ignored when calculating the diff;
+        otherwise, cost is used as a tolerance factor on the metric.
         returns a list of links
         """
         # make a copy of old topology to avoid tampering with it
         diff = old.copy()
         not_different = []
         # loop over all links
-        for oedge in old.edges():
+        for old_edge in old.edges(data=True):
             # if link is also in new topology add it to the list
-            for nedge in new.edges():
-                if (oedge[0] == nedge[0] and oedge[1] == nedge[1]) or (
-                   oedge[1] == nedge[0] and oedge[0] == nedge[1]):
-                    not_different.append(oedge)
+            for new_edge in new.edges(data=True):
+                if old_edge[0] in new_edge and old_edge[1] in new_edge:
+                    if not cost:
+                        not_different.append(old_edge)
+                    else:
+                        # check whether the old link metric falls inside the
+                        # tolerance window
+                        if (new_edge[2]['weight'] / cost
+                                <= old_edge[2]['weight'] <=
+                                new_edge[2]['weight'] * cost):
+                            not_different.append(old_edge)
         # keep only differences
         diff.remove_edges_from(not_different)
         # return list of links
-        return diff.edges()
+        if not cost:
+            return diff.edges()
+        else:
+            # when cost is set, return the edges together with their data
+            return diff.edges(data=True)
diff --git a/netdiff/batman.py b/netdiff/batman.py
index 80171c8..c3a3308 100755
--- a/netdiff/batman.py
+++ b/netdiff/batman.py
@@ -7,6 +7,8 @@ class BatmanParser(BaseParser):
     """ Batman Topology Parser """
 
     def _get_primary(self, mac, collection):
+        # Use the aggregated node structure to return the primary mac address
+        # associated with a secondary mac; if no match is found, return 0.
         for node in collection:
             for interface in node:
                 if mac == interface:
@@ -14,33 +16,34 @@
         return 0
 
     def _get_ag_node_list(self, data):
-        agn = []
+        # Build the list of aggregated nodes: primary mac plus its secondary macs.
+        ag_nodes = []
         for node in data:
-            agi = []
-            agi.append(node['primary'])
+            ag_interfaces = []
+            ag_interfaces.append(node['primary'])
             if('secondary'in node):
                 for interface in node['secondary']:
-                    agi.append(interface)
-            agn.append(agi)
-        return agn
+                    ag_interfaces.append(interface)
+            ag_nodes.append(ag_interfaces)
+        return ag_nodes
 
     def _parse(self, data):
         """
-        Converts a topology in a NetworkX MultiGraph object.
+        Converts a topology into a NetworkX Graph object.
 
-        :param str topology: The OLSR1 topology to be converted (JSON or dict)
-        :return: the NetworkX MultiGraph object
+        :param str topology: The Batman topology to be converted (JSON or dict)
+        :return: the NetworkX Graph object
         """
         # if data is not a python dict it must be a json string
         if type(data) is not dict:
             data = json.loads(data)
         # initialize graph and list of aggregated nodes
-        graph = networkx.MultiGraph()
-        agn = self._get_ag_node_list(data['vis'])
+        graph = networkx.Graph()
+        ag_nodes = self._get_ag_node_list(data['vis'])
         # loop over topology section and create networkx graph
         for node in data["vis"]:
             for neigh in node["neighbors"]:
-                p_neigh = self._get_primary(neigh['neighbor'], agn)
+                p_neigh = self._get_primary(neigh['neighbor'], ag_nodes)
                 if not graph.has_edge(node['primary'], p_neigh):
                     graph.add_edge(node['primary'],
                                    p_neigh,
diff --git a/netdiff/olsr1.py b/netdiff/olsr1.py
index 6c758fa..b669efb 100755
--- a/netdiff/olsr1.py
+++ b/netdiff/olsr1.py
@@ -8,16 +8,16 @@ class Olsr1Parser(BaseParser):
     """ OLSR v1 Topology Parser """
 
     def _parse(self, topology):
         """
-        Converts a topology in a NetworkX MultiGraph object.
+        Converts a topology into a NetworkX Graph object.
         :param str topology: The OLSR1 topology to be converted (JSON or dict)
-        :return: the NetworkX MultiGraph object
+        :return: the NetworkX Graph object
         """
         # if data is not a python dict it must be a json string
         if type(topology) is not dict:
             topology = json.loads(topology)
         # initialize graph
-        graph = networkx.MultiGraph()
+        graph = networkx.Graph()
         # loop over topology section and create networkx graph
         for link in topology["topology"]:
             graph.add_edge(link["lastHopIP"],
diff --git a/tests/batman/tests.py b/tests/batman/tests.py
index 307803e..9aa9cb8 100755
--- a/tests/batman/tests.py
+++ b/tests/batman/tests.py
@@ -27,13 +27,13 @@ def test_added_removed_1_node(self):
         self._test_expected_links(
             links=result['added'],
-            expected_links = [
+            expected_links=[
                 ('a0:f3:c1:96:94:10', '90:f6:52:f2:8c:2c')
             ]
         )
         self._test_expected_links(
             links=result['removed'],
-            expected_links = [
+            expected_links=[
                 ('a0:f3:c1:96:94:06', '90:f6:52:f2:8c:2c')
             ]
         )
diff --git a/tests/olsr1/3links_metric.json b/tests/olsr1/3links_metric.json
new file mode 100644
index 0000000..72f88fc
--- /dev/null
+++ b/tests/olsr1/3links_metric.json
@@ -0,0 +1,28 @@
+{
+    "topology": [
+        {
+            "destinationIP": "10.150.0.2",
+            "lastHopIP": "10.150.0.3",
+            "linkQuality": 0.195,
+            "neighborLinkQuality": 0.184,
+            "tcEdgeCost": 28334,
+            "validityTime": 284572
+        },
+        {
+            "destinationIP": "10.150.0.3",
+            "lastHopIP": "10.150.0.4",
+            "linkQuality": 1.0,
+            "neighborLinkQuality": 1.0,
+            "tcEdgeCost": 2048,
+            "validityTime": 284572
+        },
+        {
+            "destinationIP": "10.150.0.4",
+            "lastHopIP": "10.150.0.5",
+            "linkQuality": 1.0,
+            "neighborLinkQuality": 1.0,
+            "tcEdgeCost": 1024,
+            "validityTime": 284572
+        }
+    ]
+}
diff --git a/tests/olsr1/tests.py b/tests/olsr1/tests.py
index 38c0022..f225635 100755
--- a/tests/olsr1/tests.py
+++ b/tests/olsr1/tests.py
@@ -12,6 +12,7 @@
 links2 = open('{0}/2links.json'.format(CURRENT_DIR)).read()
 links3 = open('{0}/3links.json'.format(CURRENT_DIR)).read()
 links5 = open('{0}/5links.json'.format(CURRENT_DIR)).read()
+links3metric = open('{0}/3links_metric.json'.format(CURRENT_DIR)).read()
 
 
 class TestOlsr1Parser(TestCase):
@@ -67,7 +68,7 @@ def test_simple_diff(self):
         # ensure 3 links added
         self._test_expected_links(
             links=result['added'],
-            expected_links = [
+            expected_links=[
                 ('10.150.0.3', '10.150.0.7'),
                 ('10.150.0.3', '10.150.0.6'),
                 ('10.150.0.7', '10.150.0.6'),
@@ -75,5 +76,20 @@
         )
         self._test_expected_links(
             links=result['removed'],
-            expected_links = [('10.150.0.5', '10.150.0.4')]
+            expected_links=[('10.150.0.5', '10.150.0.4')]
         )
+
+    def test_diff_metric(self):
+        parser = Olsr1Parser(old=links3, new=links3metric)
+        result = parser.diff(cost=1)
+        # the metric changed, so one link is removed and one is added
+        self.assertEqual(len(result['added']), 1)
+        self.assertEqual(len(result['removed']), 1)
+
+    def test_diff_metric_threshold(self):
+        parser = Olsr1Parser(old=links3, new=links3metric)
+        result = parser.diff(cost=2)
+        # the metric changed but stays within the tolerance threshold (2),
+        # so no changes are detected
+        self.assertEqual(len(result['added']), 0)
+        self.assertEqual(len(result['removed']), 0)
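
Note (not part of the patch): the tolerance check added in _make_diff treats a link as unchanged when the old weight lies inside the window [new_weight / cost, new_weight * cost]. A minimal sketch of that predicate, using an illustrative helper name and example weights:

    def within_tolerance(old_weight, new_weight, cost):
        # same window as _make_diff: new/cost <= old <= new*cost
        return new_weight / cost <= old_weight <= new_weight * cost

    within_tolerance(1024, 2048, 2)    # True: a factor-2 change is tolerated
    within_tolerance(1024, 28334, 2)   # False: the change exceeds the window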
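Usage sketch (not part of the patch; assumes the netdiff package is importable and that the paths are relative to the repository root):

    from netdiff.olsr1 import Olsr1Parser

    old = open('tests/olsr1/3links.json').read()
    new = open('tests/olsr1/3links_metric.json').read()
    parser = Olsr1Parser(old=old, new=new)

    # metric ignored: only links that appeared or disappeared are reported
    print(parser.diff())

    # metric-aware: a link whose weight moved outside the factor-2 tolerance
    # window shows up in both 'added' and 'removed'
    print(parser.diff(cost=2))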