
Merge pull request #122 from netzob/next

Merge next branch into master branch
gbossert committed Oct 5, 2017
2 parents 06315c5 + 0e168d4 commit 3651db08434795092376b23e1e187a3740882388
Showing with 682 additions and 1,734 deletions.
  1. +3 −5 netzob/doc/documentation/source/conf.py
  2. +2 −1 netzob/requirements.txt
  3. +43 −41 netzob/src/netzob/{Inference/Grammar/lstar/ObservationTable.py → Common/Utils/MessageCells.py}
  4. +3 −3 netzob/src/netzob/Import/PCAPImporter/ImpactDecoder.py
  5. +1 −1 netzob/src/netzob/Import/PCAPImporter/ImpactPacket.py
  6. +125 −0 netzob/src/netzob/Inference/Grammar/ActiveGrammarInferer.py
  7. +0 −602 netzob/src/netzob/Inference/Grammar/Angluin.py
  8. +0 −303 netzob/src/netzob/Inference/Grammar/EquivalenceOracles/WMethodNetworkEquivalenceOracle.py
  9. 0 netzob/src/netzob/Inference/Grammar/EquivalenceOracles/__init__.py
  10. +4 −1 netzob/src/netzob/Inference/Grammar/GenericMAT.py
  11. +0 −187 netzob/src/netzob/Inference/Grammar/LearningAlgorithm.py
  12. +0 −159 netzob/src/netzob/Inference/Grammar/MQCache.py
  13. +0 −58 netzob/src/netzob/Inference/Grammar/Oracles/AbstractOracle.py
  14. +0 −113 netzob/src/netzob/Inference/Grammar/Oracles/NetworkOracle.py
  15. 0 netzob/src/netzob/Inference/Grammar/Oracles/__init__.py
  16. +140 −0 netzob/src/netzob/Inference/Grammar/ProcessWrappers/NetworkProcessWrapper.py
  17. +217 −0 netzob/src/netzob/Inference/Grammar/ProcessWrappers/ProcessWrapper.py
  18. 0 netzob/src/netzob/Inference/Grammar/{lstar → ProcessWrappers}/__init__.py
  19. +5 −28 ...tzob/Inference/Grammar/{EquivalenceOracles/AbstractEquivalenceOracle.py → ProcessWrappers/all.py}
  20. +0 −177 netzob/src/netzob/Inference/Grammar/Queries/MembershipQuery.py
  21. 0 netzob/src/netzob/Inference/Grammar/Queries/__init__.py
  22. +2 −1 netzob/src/netzob/Inference/Grammar/all.py
  23. +2 −2 netzob/src/netzob/Inference/Vocabulary/FormatOperations/ClusterByApplicativeData.py
  24. +2 −2 netzob/src/netzob/Inference/Vocabulary/FormatOperations/ClusterBySize.py
  25. +51 −34 netzob/src/netzob/Inference/Vocabulary/RelationFinder.py
  26. +52 −4 netzob/src/netzob/Model/Vocabulary/AbstractField.py
  27. +1 −2 netzob/src/netzob/Model/Vocabulary/Domain/Parser/FlowParser.py
  28. +1 −1 netzob/src/netzob/Model/Vocabulary/Domain/Specializer/MessageSpecializer.py
  29. +3 −0 netzob/src/netzob/Model/Vocabulary/EmptySymbol.py
  30. +9 −0 netzob/src/netzob/Model/Vocabulary/Functions/EncodingFunctions/TypeEncodingFunction.py
  31. +2 −2 netzob/src/netzob/Model/Vocabulary/Symbol.py
  32. +2 −2 netzob/src/netzob/Model/Vocabulary/Types/ASCII.py
  33. +1 −2 netzob/src/netzob/Simulator/AbstractionLayer.py
  34. +3 −1 netzob/src/netzob/Simulator/Channels/SSLClient.py
  35. +8 −2 netzob/test/src/test_netzob/suite_DocTests.py
netzob/doc/documentation/source/conf.py
@@ -225,18 +225,16 @@
 # -- Options for apidoc generation in rtfd.org----------------------------------
 # Mocking system dependencies
-from mock import Mock as MagicMock
+from unittest.mock import MagicMock

 class Mock(MagicMock):
     @classmethod
     def __getattr__(cls, name):
-        return Mock()
+        return MagicMock()

-MOCK_MODULES = ['pcapy']
+MOCK_MODULES = ['pcapy', 'numpy']
 sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)

 on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
 if on_rtd:
     os.system("sphinx-apidoc -T -f -o ./developer_guide/API/ ../../../src/netzob")
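As an aside, here is a minimal standalone sketch of the mocking pattern this hunk relies on (stdlib only; the trailing pcapy import is purely illustrative). Any module listed in MOCK_MODULES is replaced in sys.modules by a mock whose attribute lookups always succeed, so Sphinx can import netzob modules that depend on pcapy or numpy without those packages being installed (e.g. on readthedocs.org):

import sys
from unittest.mock import MagicMock

class Mock(MagicMock):
    # Every attribute access resolves to a fresh MagicMock, so imports of
    # names from the mocked module succeed during autodoc introspection.
    @classmethod
    def __getattr__(cls, name):
        return MagicMock()

sys.modules.update((mod_name, Mock()) for mod_name in ['pcapy', 'numpy'])

import pcapy            # resolves to the mock, not the real C extension
print(pcapy.open_live)  # a MagicMock attribute, enough for doc generation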
netzob/requirements.txt
@@ -6,4 +6,5 @@ numpy
 colorama==0.3.3
 bintrees==2.0.0
 minepy==1.0.0
-arpreq==0.3.1
+arpreq==0.3.1
+pylstar==0.1.2
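The new pylstar pin powers the L* machinery introduced later in this commit. A quick sanity check of the names it provides (these import paths are exactly the ones used by ActiveGrammarInferer.py below):

from pylstar.LSTAR import LSTAR
from pylstar.Letter import Letter
from pylstar.eqtests.RandomWalkMethod import RandomWalkMethod

print(LSTAR, Letter, RandomWalkMethod)  # importable once pylstar==0.1.2 is installed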
netzob/src/netzob/{Inference/Grammar/lstar/ObservationTable.py → Common/Utils/MessageCells.py}
@@ -1,4 +1,4 @@
-# -*- coding: utf-8 -*-
+#-*- coding: utf-8 -*-
 #+---------------------------------------------------------------------------+
 #|          01001110 01100101 01110100 01111010 01101111 01100010            |
@@ -25,55 +25,57 @@
 #|             Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/           |
 #+---------------------------------------------------------------------------+

-#+----------------------------------------------
-#| Standard library imports
-#+----------------------------------------------
+#+---------------------------------------------------------------------------+
+#| File contributors :                                                       |
+#|       - Georges Bossert <gbossert (a) miskin.fr>                          |
+#+---------------------------------------------------------------------------+

-#+----------------------------------------------
-#| Related third party imports
-#+----------------------------------------------
+#+---------------------------------------------------------------------------+
+#| Standard library imports                                                  |
+#+---------------------------------------------------------------------------+
+from collections import OrderedDict

+#+---------------------------------------------------------------------------+
+#| Related third party imports                                               |
+#+---------------------------------------------------------------------------+

-#+----------------------------------------------
-#| Local application imports
-#+----------------------------------------------
+#+---------------------------------------------------------------------------+
+#| Local application imports                                                 |
+#+---------------------------------------------------------------------------+
 from netzob.Common.Utils.Decorators import NetzobLogger
-from netzob.Common.Utils.Decorators import typeCheck


 @NetzobLogger
-class ObservationTable(object):
-    """Implementation of an Observation Table (OT) as described by Angluin in "Learning Regular Sets from Queries and Counterexamples" """
-
-    def __init__(self, alphabet):
-        self.alphabet = alphabet
-        self.__shortPrefixRows = list()
-        self.__longPrefixRows = list()
-        self.__allRows = list()
-        self.__allRowContents = list()
-        self.__canonicalRows = list()
-        self.__rowContentIds = dict()
-        self.__rowMapp = dict()
-        self.__numRows = 0
-        self.__suffixes = list()
-
-    def initialize(self, initialSuffixes, mqOracle):
-        if len(self.__allRows) > 0:
-            raise Exception(
-                "Called initialize, but there are already rows present")
+class MessageCells(OrderedDict):
+    """
+    This data structure extends OrderedDict to support additional attributes
+    such as 'fields'. It was created for the `AbstractField.getMessageCells` method.

-        len(initialSuffixes)
-        self.__suffixes.extend(initialSuffixes)
+    >>> from netzob.all import *
+    >>> m = MessageCells()
+    >>> m[1] = "a"
+    >>> m[2] = "b"
+    >>> m[1] = m[2]
+    >>> list(m.items())
+    [(1, 'b'), (2, 'b')]
+    >>> m.fields = [Field(name="f1"), Field(name="f2")]
+    >>> [f.name for f in m.fields]
+    ['f1', 'f2']

-        numLps = len(self.alphabet)
-        1 + numLps
+    """
+
+    def __init__(self):
+        super().__init__()
+        self.fields = []

     @property
-    def alphabet(self):
-        return self.__alphabet
+    def fields(self):
+        """Fields that participate in the message cells columns"""
+        return self.__fields

-    @alphabet.setter
-    def alphabet(self, alphabet):
-        self.__alphabet = alphabet
+    @fields.setter
+    def fields(self, fields):
+        self.__fields = []
+        for f in fields:
+            self.__fields.append(f)
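A small usage note on the setter above: it copies the incoming list element by element, so later mutation of the caller's list does not leak into the MessageCells instance. A quick sketch, using only the class defined in this diff (the cell key and value are illustrative):

from netzob.all import *

cells = MessageCells()
cells["col1"] = "value"               # behaves like a regular OrderedDict

my_fields = [Field(name="f1"), Field(name="f2")]
cells.fields = my_fields              # the setter stores a shallow copy
my_fields.append(Field(name="f3"))    # does not affect cells.fields

print([f.name for f in cells.fields])  # ['f1', 'f2']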
netzob/src/netzob/Import/PCAPImporter/ImpactDecoder.py
@@ -65,9 +65,9 @@ def decode(self, aBuffer):
         if e.get_ether_type() == ImpactPacket.IP.ethertype:
             self.ip_decoder = IPDecoder()
             packet = self.ip_decoder.decode(aBuffer[off:])
-        elif e.get_ether_type() == ImpactPacket.ARP.ethertype:
-            self.arp_decoder = ARPDecoder()
-            packet = self.arp_decoder.decode(aBuffer[off:])
+        # elif e.get_ether_type() == ImpactPacket.ARP.ethertype:
+        #     self.arp_decoder = ARPDecoder()
+        #     packet = self.arp_decoder.decode(aBuffer[off:])
         else:
             self.data_decoder = DataDecoder()
             packet = self.data_decoder.decode(aBuffer[off:])
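The effect of commenting this branch out: Ethernet frames carrying ARP no longer get a dedicated decoder and fall through to the generic DataDecoder, which keeps the payload as opaque bytes. A minimal sketch of the resulting dispatch (the hex EtherType values are the standard IANA assignments, not taken from this diff):

def dispatch(ether_type):
    # 0x0800 = IPv4; the dedicated ARP branch (0x0806) is disabled above
    if ether_type == 0x0800:
        return "IPDecoder"
    return "DataDecoder"

assert dispatch(0x0800) == "IPDecoder"
assert dispatch(0x0806) == "DataDecoder"  # ARP is now decoded as raw data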
netzob/src/netzob/Import/PCAPImporter/ImpactPacket.py
@@ -614,7 +614,7 @@ def pop_tag(self, index=0):
     def load_header(self, aBuffer):
         self.tag_cnt = 0
         while aBuffer[12 + 4 * self.tag_cnt:14 + 4 * self.tag_cnt] in (
-                '\x81\x00', '\x88\xa8', '\x91\x00'):
+                b'\x81\x00', b'\x88\xa8', b'\x91\x00'):
             self.tag_cnt += 1

         hdr_len = self.get_header_size()
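The bytes literals are the actual fix here: under Python 3, a slice of a bytes buffer is itself bytes, and bytes never compare equal to str, so with the old str literals the membership test for a VLAN tag (0x8100, 0x88A8 and 0x9100 are 802.1Q/802.1ad tag protocol identifiers) could never match. A quick demonstration:

# A synthetic 14-byte Ethernet header followed by one 802.1Q tag marker
buf = bytes.fromhex("ffffffffffff0000000000008100")
tag = buf[12:14]            # slicing bytes yields bytes in Python 3

print(tag == '\x81\x00')    # False: bytes != str, the old bug
print(tag == b'\x81\x00')   # True: matches with the bytes literal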
netzob/src/netzob/Inference/Grammar/ActiveGrammarInferer.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+#+---------------------------------------------------------------------------+
+#|          01001110 01100101 01110100 01111010 01101111 01100010            |
+#|                                                                           |
+#|               Netzob : Inferring communication protocols                  |
+#+---------------------------------------------------------------------------+
+#| Copyright (C) 2011-2017 Georges Bossert and Frédéric Guihéry              |
+#| This program is free software: you can redistribute it and/or modify      |
+#| it under the terms of the GNU General Public License as published by      |
+#| the Free Software Foundation, either version 3 of the License, or         |
+#| (at your option) any later version.                                       |
+#|                                                                           |
+#| This program is distributed in the hope that it will be useful,           |
+#| but WITHOUT ANY WARRANTY; without even the implied warranty of            |
+#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the              |
+#| GNU General Public License for more details.                              |
+#|                                                                           |
+#| You should have received a copy of the GNU General Public License         |
+#| along with this program. If not, see <http://www.gnu.org/licenses/>.      |
+#+---------------------------------------------------------------------------+
+#| @url      : http://www.netzob.org                                         |
+#| @contact  : contact@netzob.org                                            |
+#| @sponsors : Amossys, http://www.amossys.fr                                |
+#|             Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/           |
+#+---------------------------------------------------------------------------+
+
+#+---------------------------------------------------------------------------+
+#| File contributors :                                                       |
+#|       - Georges Bossert <gbossert (a) miskin.fr>                          |
+#+---------------------------------------------------------------------------+
+
+#+---------------------------------------------------------------------------+
+#| Standard library imports                                                  |
+#+---------------------------------------------------------------------------+
+import time
+from threading import Thread
+import os
+
+#+---------------------------------------------------------------------------+
+#| Related third party imports                                               |
+#+---------------------------------------------------------------------------+
+from pylstar.LSTAR import LSTAR
+from pylstar.Letter import Letter
+from pylstar.eqtests.RandomWalkMethod import RandomWalkMethod
+
+#+---------------------------------------------------------------------------+
+#| Local application imports                                                 |
+#+---------------------------------------------------------------------------+
+from netzob.Common.Utils.Decorators import NetzobLogger
+from netzob.Simulator.AbstractionLayer import AbstractionLayer
+from netzob.Inference.Grammar.GenericMAT import GenericMAT
+
+
+@NetzobLogger
+class ActiveGrammarInferer(Thread):
+
+    def __init__(self, input_symbols, output_symbols, process_wrapper, channel, tmp_path=None):
+        Thread.__init__(self)
+        self.input_symbols = input_symbols
+        self.output_symbols = output_symbols
+        self.process_wrapper = process_wrapper
+        self.channel = channel
+        self.tmp_path = tmp_path
+        self.lstar = None
+        self.infered_automata = None
+
+    def stop(self):
+        """
+        This method can be used to halt the current inference process.
+        """
+        self._logger.info("Stopping the inference process")
+        if self.lstar is not None:
+            self.lstar.stop()
+        if self.process_wrapper is not None:
+            self.process_wrapper.stop(force=True)
+
+    def run(self):
+        self._logger.info("Configuring the inference process")
+
+        # creates a letter for each input symbol
+        input_letters = [Letter(s) for s in self.input_symbols]
+
+        try:
+            # creates an abstraction layer on top of the channel to abstract and specialize received and sent messages
+            abstraction_layer = AbstractionLayer(
+                channel=self.channel,
+                symbols=self.input_symbols + self.output_symbols
+            )
+
+            # creates a minimal adequate teacher
+            mat = GenericMAT(
+                abstraction_layer=abstraction_layer,
+                process_wrapper=self.process_wrapper,
+                cache_file_path=os.path.join(self.tmp_path, "cache.dump"))
+
+            # configures the RandomWalkMethod that will be used as an equivalence query
+            eqtests = RandomWalkMethod(
+                knowledge_base=mat,
+                input_letters=input_letters,
+                max_steps=50000,
+                restart_probability=0.7)
+
+            # and finally, the LSTAR algorithm
+            self.lstar = LSTAR(
+                input_vocabulary=self.input_symbols,
+                knowledge_base=mat,
+                max_states=30,
+                eqtests=eqtests)
+
+            # starts the inference process and stores the inferred grammar in a dot file (graphviz)
+            self._logger.info("Starting the inference process...")
+            start_ts = time.time()
+            self.infered_automata = self.lstar.learn()
+            duration = time.time() - start_ts
+            self._logger.info("Inference process finished ({}s)".format(duration))
+        finally:
+            try:
+                self.process_wrapper.stop(force=True)
+            except Exception as e:
+                self._logger.info("Encountered the following error while stopping the process wrapper: {}".format(e))