diff --git a/README.rst b/README.rst index 15b9290..8b0802b 100755 --- a/README.rst +++ b/README.rst @@ -83,10 +83,11 @@ Dependencies + Optional: - `xtermcolor`_: Terminal color support - - `cups`_: Python bindings for libcups - - `rpyc`_: Remote Python Call (RPyC), a transparent and symmetric RPC library + - `graphviz`_: For graphic visualization (e.g., scenario display) - `paramiko`_: Python implementation of the SSHv2 protocol - `serial`_: For serial port access + - `cups`_: Python bindings for libcups + - `rpyc`_: Remote Python Call (RPyC), a transparent and symmetric RPC library + For testing: @@ -102,10 +103,11 @@ Dependencies .. _six: http://pythonhosted.org/six/ .. _sqlite3: https://www.sqlite.org/ .. _xtermcolor: https://github.com/broadinstitute/xtermcolor -.. _cups: https://pypi.python.org/pypi/pycups -.. _rpyc: https://pypi.python.org/pypi/rpyc +.. _graphviz: https://pypi.python.org/pypi/graphviz .. _paramiko: http://www.paramiko.org/ .. _serial: https://github.com/pyserial/pyserial +.. _cups: https://pypi.python.org/pypi/pycups +.. _rpyc: https://pypi.python.org/pypi/rpyc .. _ddt: https://github.com/txels/ddt .. _mock: https://pypi.python.org/pypi/mock .. _sphinx: http://sphinx-doc.org/ diff --git a/TODO b/TODO index 41b4310..fe32a81 100644 --- a/TODO +++ b/TODO @@ -1,7 +1,6 @@ [NEW FEATURES] - Add support for automatic creation of Generators that play around scenarios -- Add support for scenario visualization and JSON description of scenario - Add GDB/PIN/QEMU probes/managers - Add support for evolutionary fuzzing - Add FmkDB visualization tools diff --git a/data_models/protocols/pppoe_strategy.py b/data_models/protocols/pppoe_strategy.py index 9e67c91..44e6afa 100644 --- a/data_models/protocols/pppoe_strategy.py +++ b/data_models/protocols/pppoe_strategy.py @@ -58,7 +58,7 @@ def retrieve_X_from_feedback(env, current_step, next_step, feedback, x='padi', u raise ValueError if data is None: - return False + continue off = data.find(mac_dst) data = data[off:] result = msg_x.absorb(data, constraints=AbsNoCsts(size=True, struct=True)) @@ -169,10 +169,11 @@ def disrupt_data(self, dm, target, prev_data): return prev_data ### PADI fuzz scenario ### -step_wait_padi = NoDataStep(fbk_timeout=10, fbk_mode=Target.FBK_WAIT_UNTIL_RECV) +step_wait_padi = NoDataStep(fbk_timeout=10, fbk_mode=Target.FBK_WAIT_UNTIL_RECV, + step_desc='Wait PADI') dp_pado = DataProcess(process=[('ALT', None, UI(conf='fuzz')), - ('tTYPE', UI(init=20), UI(order=True, fuzz_mag=0.7)), + ('tTYPE', UI(init=1), UI(order=True, fuzz_mag=0.7)), 'FIX_FIELDS#pado1'], seed='pado') dp_pado.append_new_process([('ALT', None, UI(conf='fuzz')), ('tSTRUCT', UI(init=1), UI(deep=True)), 'FIX_FIELDS#pado2']) @@ -189,7 +190,7 @@ def disrupt_data(self, dm, target, prev_data): sc1.set_anchor(step_wait_padi) ### PADS fuzz scenario ### -step_wait_padi = NoDataStep(fbk_timeout=10, fbk_mode=Target.FBK_WAIT_UNTIL_RECV) +step_wait_padi = NoDataStep(fbk_timeout=10, fbk_mode=Target.FBK_WAIT_UNTIL_RECV, step_desc='Wait PADI') step_send_valid_pado = Step(DataProcess(process=[('FIX_FIELDS#pads1', None, UI(reevaluate_csts=True))], seed='pado'), fbk_timeout=0.1, fbk_mode=Target.FBK_WAIT_FULL_TIME) step_send_padt = Step(DataProcess(process=[('FIX_FIELDS#pads2', None, UI(reevaluate_csts=True))], @@ -201,7 +202,8 @@ def disrupt_data(self, dm, target, prev_data): dp_pads.append_new_process([('ALT', None, UI(conf='fuzz')), ('tSTRUCT#2', UI(init=1), UI(deep=True)), 'FIX_FIELDS#pads4']) step_send_fuzzed_pads = Step(dp_pads, fbk_timeout=0.1, 
fbk_mode=Target.FBK_WAIT_FULL_TIME) -step_wait_padr = NoDataStep(fbk_timeout=10, fbk_mode=Target.FBK_WAIT_UNTIL_RECV) +step_wait_padr = NoDataStep(fbk_timeout=10, fbk_mode=Target.FBK_WAIT_UNTIL_RECV, + step_desc='Wait PADR/PADI') step_wait_padi.connect_to(step_send_valid_pado, cbk_after_fbk=retrieve_padi_from_feedback) step_send_valid_pado.connect_to(step_send_fuzzed_pads, cbk_after_fbk=retrieve_padr_from_feedback_and_update) diff --git a/docs/source/conf.py b/docs/source/conf.py index f8198bb..57808b8 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -57,7 +57,7 @@ # The short X.Y version. version = '0.25' # The full version, including alpha/beta/rc tags. -release = '0.25.1' +release = '0.25.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/docs/source/data_model.rst b/docs/source/data_model.rst index 41f7ba2..6d6e780 100644 --- a/docs/source/data_model.rst +++ b/docs/source/data_model.rst @@ -1621,7 +1621,7 @@ Example 1: The basics. {'name': 'HTTP_version_5', 'contents': INT_Str(mini=0, maxi=9)} ]} -Example 2: Introducing choice. (Refer to :ref:`dm:nt-keywords`) +Example 2: Introducing choices. (Refer to :ref:`dm:nt-keywords`) .. code-block:: python :linenos: @@ -1633,13 +1633,43 @@ Example 2: Introducing choice. (Refer to :ref:`dm:nt-keywords`) 'shape_type': MH.Pick, 'contents': [ {'name':'something_1', 'contents':INT_Str(values=[333, 444])}, - {'name':'something_1', 'contents':String(values=["foo", "bar"])}, - {'name':'something_1', 'contents':String(alphabet="0123456789",size=1)}, - {'name':'something_1', 'contents':String(alphabet="th|is", size=1)} + {'name':'something_2', 'contents':String(values=["foo", "bar"])}, + {'name':'something_3', 'contents':String(alphabet="0123456789",size=1)}, + {'name':'something_4', 'contents':String(alphabet="th|is", size=1)} ]} -Example 3: Using quantifiers and the escape character ``\``. +Example 3: Using shapes. (Refer to :ref:`dm:patterns`) + +.. code-block:: python + :linenos: + + regex = {'name': 'something', + 'contents': 'this[\d](is)|a|digit[!]'} + # is equivalent to + classic = {'name': 'something', + 'contents': [ + {'weights': 1, + 'contents': [ + {'name': 'something_1', 'contents': String(values=['this'])}, + {'name': 'something_2', 'contents': String(alphabet='0123456789')}, + {'name': 'something_3', 'contents': String(values=['is'])}, + ]}, + + {'weights': 1, + 'contents': [ + {'name': 'something_4', 'contents': String(values=['a'])}, + ]}, + + {'weights': 1, + 'contents': [ + {'name': 'something_5', 'contents': String(values=['digit'])}, + {'name': 'something_6', 'contents': String(alphabet='!')}, + ]}, + ]} + + +Example 4: Using quantifiers and the escape character ``\``. .. code-block:: python :linenos: @@ -1651,13 +1681,13 @@ Example 3: Using quantifiers and the escape character ``\``. 'contents': [ {'name': 'something_1', 'contents': String(values=["(this"])}, {'name': 'something_2', - 'contents': String(alphabet="is", min_sz=3, max_sz=4)}, + 'contents': String(alphabet="is", min_sz=3, max_sz=4)}, {'name': 'something_3', 'contents': String(values=["th"])}, {'name': 'something_4', 'qty': (1, -1), 'contents': String(values=["e"])}, {'name': 'something_5', 'contents': String(values=["end]"])} ]} -Example 4: Invalid regular expressions. +Example 5: Invalid regular expressions. .. code-block:: python :linenos: @@ -1666,6 +1696,7 @@ Example 4: Invalid regular expressions. 
# raise an framework.error_handling.InconvertibilityError # because there are two nested parenthesis. - error_2 = {'name': 'rejected', 'contents': '(HTTP)foo|bar'} + error_2 = {'name': 'rejected', 'contents': '(HT?TP)foo|bar'} # raise also an framework.error_handling.InconvertibilityError - # because | has priority over parenthesis in regular expressions. + # because a quantifier (that requires the creation of a terminal node) + # has been found within parenthesis. diff --git a/docs/source/disruptors.rst b/docs/source/disruptors.rst index 7a1ea2c..6236b13 100644 --- a/docs/source/disruptors.rst +++ b/docs/source/disruptors.rst @@ -52,6 +52,10 @@ Parameters: | | desc: when set to True, if a node structure has changed, the modelwalker | | will reset its walk through the children nodes | | default: True [type: bool] + |_ ign_sep + | | desc: when set to True, non-terminal separators will be ignored if + | | any are defined. + | | default: False [type: bool] |_ fix | | desc: limit constraints fixing to the nodes related to the currently | | fuzzed one (only implemented for 'sync_size_with' and diff --git a/docs/source/scenario.rst b/docs/source/scenario.rst index 5233750..e0349e8 100644 --- a/docs/source/scenario.rst +++ b/docs/source/scenario.rst @@ -105,6 +105,10 @@ a client listening on a TCP socket bound to the port 12345:: [another term] # nc -k -l 12345 +If you want to visualize your scenario, you can issue the following command +(``[FMT]`` is optional and can be ``xdot``, ``pdf``, ``png``, ...):: + + [fuddly term] >> show_scenario SC_EX1 [FMT] Finally, note that a step once executed will display a description related to what it did. You can override this description by providing the ``step_desc`` parameter of a diff --git a/framework/data_model.py b/framework/data_model.py index 5844fc8..d5dedf1 100644 --- a/framework/data_model.py +++ b/framework/data_model.py @@ -4352,13 +4352,23 @@ def unfreeze(self, conf=None, recursive=True, dont_change_state=False, ignore_en self.expanded_nodelist_sz += 1 self.expanded_nodelist = fresh_expanded_nodelist[:self.expanded_nodelist_sz] else: - # This case should not exist, a priori - self.expanded_nodelist_sz = len(fresh_expanded_nodelist) - self.expanded_nodelist = fresh_expanded_nodelist + # This case should never trigger + raise ValueError + # self.expanded_nodelist_sz = len(fresh_expanded_nodelist) + # self.expanded_nodelist = fresh_expanded_nodelist else: # assert self.expanded_nodelist_origsz > self.expanded_nodelist_sz - self.expanded_nodelist.append(fresh_expanded_nodelist[self.expanded_nodelist_sz]) - self.expanded_nodelist_sz += 1 + if self.expanded_nodelist_sz is None: + # This means that nothing has been computed yet. This is the case after + # a call to reset() which is either due to a node copy after an absorption + # or at node initialization. 
+ # In such a case, self.expanded_nodelist should be equal to [] + assert self.expanded_nodelist == [] + self.expanded_nodelist = fresh_expanded_nodelist + self.expanded_nodelist_sz = len(fresh_expanded_nodelist) + else: + self.expanded_nodelist.append(fresh_expanded_nodelist[self.expanded_nodelist_sz]) + self.expanded_nodelist_sz += 1 else: # In this case the states are random, thus we # don't bother trying to recover the previous one @@ -5310,6 +5320,7 @@ def set_internals(self, backup): self.internals = backup.internals self.current_conf = backup.current_conf self.entangled_nodes = backup.entangled_nodes + self._delayed_jobs_called = backup._delayed_jobs_called def __check_conf(self, conf): if conf is None: diff --git a/framework/data_model_helpers.py b/framework/data_model_helpers.py index 383f66e..f1ba513 100644 --- a/framework/data_model_helpers.py +++ b/framework/data_model_helpers.py @@ -1383,19 +1383,18 @@ def advance(self, ctx): raise QuantificationError() elif ctx.input in ('}', ')', ']'): raise StructureError(ctx.input) - - elif ctx.input == '[': - return self.machine.SquareBrackets - elif ctx.input == '(': - return self.machine.Parenthesis - elif ctx.input == '.': - return self.machine.Dot - elif ctx.input == '\\': - return self.machine.Escape else: ctx.append_to_contents("") - if ctx.input == '|': + if ctx.input == '[': + return self.machine.SquareBrackets + elif ctx.input == '(': + return self.machine.Parenthesis + elif ctx.input == '.': + return self.machine.Dot + elif ctx.input == '\\': + return self.machine.Escape + elif ctx.input == '|': return self.machine.Choice elif ctx.input is None: return self.machine.Final @@ -1406,14 +1405,7 @@ def advance(self, ctx): class Choice(Initial): def _run(self, ctx): - if not ctx.choice: - # if it is still possible to build a NT with multiple shapes - if len(ctx.nodes) == 0 or (len(ctx.nodes) == 1 and ctx.buffer is None): - ctx.choice = True - else: - raise InconvertibilityError() - else: - pass + ctx.start_new_shape() @register class Final(State): @@ -1431,33 +1423,32 @@ def _run(self, ctx): ctx.append_to_buffer(ctx.input) def advance(self, ctx): - if ctx.input == '(': - return self.machine.Parenthesis - elif ctx.input == '[': - return self.machine.SquareBrackets - elif ctx.input == '.': + + if ctx.input == '.': return self.machine.Dot elif ctx.input == '\\': return self.machine.Escape elif ctx.input == '|': + + if len(ctx.current_shape) > 0: + ctx.flush() + return self.machine.Choice - elif ctx.input in ('?', '*', '+', '{'): - if ctx.choice and len(ctx.values) > 1 and len(ctx.buffer) > 1: - raise InconvertibilityError() + elif ctx.input == '(': + return self.machine.Parenthesis + elif ctx.input == '[': + return self.machine.SquareBrackets - if len(ctx.buffer) == 1: - if len(ctx.values) > 1: - content = ctx.buffer - ctx.values = ctx.values[:-1] - ctx.flush() - ctx.append_to_buffer(content) + elif ctx.input in ('?', '*', '+', '{'): - else: - content = ctx.buffer[-1] + ctx.start_new_shape_from_buffer() + + if len(ctx.buffer) > 1: + char = ctx.buffer[-1] ctx.buffer = ctx.buffer[:-1] ctx.flush() - ctx.append_to_buffer(content) + ctx.append_to_buffer(char) if ctx.input == '{': return self.machine.Brackets @@ -1490,8 +1481,7 @@ def advance(self, ctx): elif ctx.input is None: return self.machine.Final - if ctx.choice: - raise InconvertibilityError() + ctx.append_to_contents("") if ctx.input == '(': return self.machine.Parenthesis @@ -1595,8 +1585,8 @@ def advance(self, ctx): return self.machine.Choice elif ctx.input is None: return 
self.machine.Final - elif ctx.choice: - raise InconvertibilityError() + + ctx.append_to_contents("") if ctx.input == '(': return self.machine.Parenthesis @@ -1616,8 +1606,10 @@ class Parenthesis(StateMachine, Group): class Initial(State): def _run(self, ctx): - ctx.flush() - ctx.append_to_buffer("") + ctx.start_new_shape_from_buffer() + if len(ctx.buffer) > 0: + ctx.flush() + ctx.append_to_contents("") def advance(self, ctx): if ctx.input in ('?', '*', '+', '{'): @@ -1688,7 +1680,12 @@ class SquareBrackets(StateMachine, Group): class Initial(State): def _run(self, ctx): - ctx.flush() + ctx.start_new_shape_from_buffer() + if len(ctx.buffer) > 0: + ctx.flush() + else: + ctx.values = None + ctx.append_to_alphabet("") def advance(self, ctx): @@ -1812,18 +1809,11 @@ class EscapeMetaSequence(Group): def _run(self, ctx): - if ctx.buffer is not None: - - if ctx.choice and len(ctx.values) > 1 and len(ctx.buffer) > 1: - raise InconvertibilityError() - - if len(ctx.buffer) == 0: - - if len(ctx.values[:-1]) > 0: - ctx.values = ctx.values[:-1] - ctx.flush() - else: - ctx.flush() + ctx.start_new_shape_from_buffer() + if len(ctx.buffer) > 0: + ctx.flush() + else: + ctx.values = None ctx.append_to_alphabet(ctx.META_SEQUENCES[ctx.input]) @@ -1831,7 +1821,13 @@ def _run(self, ctx): class Dot(Group): def _run(self, ctx): - ctx.flush() + + ctx.start_new_shape_from_buffer() + if len(ctx.buffer) > 0: + ctx.flush() + else: + ctx.values = None + ctx.append_to_alphabet(ctx.get_complement("")) @@ -1842,12 +1838,26 @@ def init_specific(self): self.values = None self.alphabet = None - self.choice = False - self.min = None self.max = None - self.nodes = [] + self.shapes = [[]] + self.current_shape = self.shapes[0] + + + def start_new_shape_from_buffer(self): + if self.values is not None and len(self.values) > 1: + buffer = self.buffer + self.values = self.values[:-1] + self.flush() + + self.start_new_shape() + self.append_to_buffer(buffer) + + def start_new_shape(self): + if len(self.current_shape) > 0: + self.shapes.append([]) + self.current_shape = self.shapes[-1] def append_to_contents(self, content): if self.values is None: @@ -1892,9 +1902,15 @@ def flush(self): else: type = fvt.String - name = self._name + '_' + str(len(self.nodes) + 1) - self.nodes.append(self._create_terminal_node(name, type, values=self.values, - alphabet=self.alphabet, qty=(self.min, self.max))) + node_nb = 0 + for nodes in self.shapes: + node_nb += len(nodes) + + name = self._name + '_' + str(node_nb + 1) + self.current_shape.append(self._create_terminal_node(name, type, + values=self.values, + alphabet=self.alphabet, + qty=(self.min, self.max))) self.reset() def reset(self): @@ -1954,14 +1970,15 @@ def _create_terminal_node(self, name, type, values=None, alphabet=None, qty=None return [node, qty[0], -1 if qty[1] is None else qty[1]] def _create_non_terminal_node(self): - if self.choice: - non_terminal = [1, [MH.Copy + MH.Pick]] - else: - non_terminal = [1, [MH.Copy + MH.Ordered]] - formatted_terminal = non_terminal[1] - for terminal in self.nodes: - formatted_terminal.append(terminal) + if len(self.shapes) == 1: + non_terminal = [1, [MH.Copy + MH.Ordered] + self.shapes[0]] + elif all(len(nodes) == 1 for nodes in self.shapes): + non_terminal = [1, [MH.Copy + MH.Pick] + [nodes[0] for nodes in self.shapes]] + else: + non_terminal = [] + for nodes in self.shapes: + non_terminal += [1, [MH.Copy + MH.Ordered] + nodes] return non_terminal diff --git a/framework/database.py b/framework/database.py index 114f65d..7aa486f 100644 --- 
a/framework/database.py +++ b/framework/database.py @@ -399,7 +399,8 @@ def check_data_existence(self, data_id, colorized=True): return data def display_data_info(self, data_id, with_data=False, with_fbk=False, with_fmkinfo=True, - fbk_src=None, limit_data_sz=600, page_width=100, colorized=True): + fbk_src=None, limit_data_sz=None, page_width=100, colorized=True, + raw=False): colorize = self._get_color_function(colorized) @@ -502,8 +503,6 @@ def handle_dmaker(dmk_pattern, info, dmk_type, dmk_name, name_sep_sz, id_src=Non msg += colorize(str(id_src), rgb=Color.FMKSUBINFO) if info is not None: info = gr.unconvert_from_internal_repr(info) - if sys.version_info[0] > 2: - info = eval('{!a}'.format(info)) info = info.split('\n') for i in info: chks = chunk_lines(i, page_width - prefix_sz - 10) @@ -586,28 +585,25 @@ def handle_dmaker(dmk_pattern, info, dmk_type, dmk_name, name_sep_sz, id_src=Non if with_data: msg += colorize("\n Sent Data:\n", rgb=Color.FMKINFOGROUP) data_content = gr.unconvert_from_internal_repr(data_content) - if len(data_content) > limit_data_sz: - data_content = data_content[:limit_data_sz] - data_content = data_content - data_content += colorize(' ...', rgb=Color.FMKHLIGHT) - else: - data_content = data_content + data_content = self._handle_binary_content(data_content, sz_limit=limit_data_sz, raw=raw, + colorized=colorized) msg += data_content msg += colorize('\n' + line_pattern, rgb=Color.NEWLOGENTRY) if with_fbk: for src, tstamp, status, content in feedback: + formatted_ts = None if tstamp is None else tstamp.strftime("%d/%m/%Y - %H:%M:%S") msg += colorize("\n Status(", rgb=Color.FMKINFOGROUP) msg += colorize("{!s}".format(src), rgb=Color.FMKSUBINFO) msg += colorize(" | ", rgb=Color.FMKINFOGROUP) - msg += colorize("{:s}".format(tstamp.strftime("%d/%m/%Y - %H:%M:%S")), + msg += colorize("{!s}".format(formatted_ts), rgb=Color.FMKSUBINFO) msg += colorize(")", rgb=Color.FMKINFOGROUP) msg += colorize(" = {!s}".format(status), rgb=Color.FMKSUBINFO) if content: content = gr.unconvert_from_internal_repr(content) - if sys.version_info[0] > 2: - content = eval('{!a}'.format(content)) + content = self._handle_binary_content(content, sz_limit=limit_data_sz, raw=raw, + colorized=colorized) chks = chunk_lines(content, page_width - 4) for c in chks: c_sz = len(c) @@ -621,10 +617,24 @@ def handle_dmaker(dmk_pattern, info, dmk_type, dmk_name, name_sep_sz, id_src=Non prt(msg + '\n') + def _handle_binary_content(self, content, sz_limit=None, raw=False, colorized=True): + colorize = self._get_color_function(colorized) + + if sys.version_info[0] > 2: + content = content if raw else '{!a}'.format(content) + else: + content = content if raw else repr(content) + + if sz_limit is not None and len(content) > sz_limit: + content = content[:sz_limit] + content += colorize(' ...', rgb=Color.FMKHLIGHT) + + return content + def display_data_info_by_date(self, start, end, with_data=False, with_fbk=False, with_fmkinfo=True, fbk_src=None, prj_name=None, - limit_data_sz=600, page_width=100, colorized=True): + limit_data_sz=None, raw=False, page_width=100, colorized=True): colorize = self._get_color_function(colorized) if prj_name: @@ -645,7 +655,7 @@ def display_data_info_by_date(self, start, end, with_data=False, with_fbk=False, data_id = rec[0] self.display_data_info(data_id, with_data=with_data, with_fbk=with_fbk, with_fmkinfo=with_fmkinfo, fbk_src=fbk_src, - limit_data_sz=limit_data_sz, page_width=page_width, + limit_data_sz=limit_data_sz, raw=raw, page_width=page_width, colorized=colorized) else: 
print(colorize("*** ERROR: No data found between {!s} and {!s} ***".format(start, end), @@ -653,7 +663,7 @@ def display_data_info_by_date(self, start, end, with_data=False, with_fbk=False, def display_data_info_by_range(self, first_id, last_id, with_data=False, with_fbk=False, with_fmkinfo=True, fbk_src=None, prj_name=None, - limit_data_sz=600, page_width=100, colorized=True): + limit_data_sz=None, raw=False, page_width=100, colorized=True): colorize = self._get_color_function(colorized) @@ -675,7 +685,7 @@ def display_data_info_by_range(self, first_id, last_id, with_data=False, with_fb data_id = rec[0] self.display_data_info(data_id, with_data=with_data, with_fbk=with_fbk, with_fmkinfo=with_fmkinfo, fbk_src=fbk_src, - limit_data_sz=limit_data_sz, page_width=page_width, + limit_data_sz=limit_data_sz, raw=raw, page_width=page_width, colorized=colorized) else: print(colorize("*** ERROR: No data found between {!s} and {!s} ***".format(first_id, diff --git a/framework/fuzzing_primitives.py b/framework/fuzzing_primitives.py index 40fddfd..18d7524 100644 --- a/framework/fuzzing_primitives.py +++ b/framework/fuzzing_primitives.py @@ -698,11 +698,17 @@ def wait_for_exhaustion(self, node): class TypedNodeDisruption(NodeConsumerStub): - def init_specific(self, **kwargs): - self._internals_criteria = dm.NodeInternalsCriteria(mandatory_attrs=[dm.NodeInternals.Mutable], - negative_attrs=[dm.NodeInternals.Separator], - node_kinds=[dm.NodeInternals_TypedValue, - dm.NodeInternals_GenFunc]) + def init_specific(self, ignore_separator=False): + if ignore_separator: + self._internals_criteria = dm.NodeInternalsCriteria(mandatory_attrs=[dm.NodeInternals.Mutable], + negative_attrs=[dm.NodeInternals.Separator], + node_kinds=[dm.NodeInternals_TypedValue, + dm.NodeInternals_GenFunc]) + else: + self._internals_criteria = dm.NodeInternalsCriteria(mandatory_attrs=[dm.NodeInternals.Mutable], + node_kinds=[dm.NodeInternals_TypedValue, + dm.NodeInternals_GenFunc]) + # self.orig_value = None self.current_fuzz_vt_list = None self.current_node = None diff --git a/framework/generic_data_makers.py b/framework/generic_data_makers.py index d71fe99..7f876a3 100644 --- a/framework/generic_data_makers.py +++ b/framework/generic_data_makers.py @@ -119,9 +119,11 @@ def disrupt_data(self, dm, target, data): ' the disruptor should apply', None, str), 'order': ('when set to True, the fuzzing order is strictly guided ' \ 'by the data structure. 
Otherwise, fuzz weight (if specified ' \ - 'in the data model) is used for ordering', False, bool), + 'in the data model) is used for ordering', True, bool), 'deep': ('when set to True, if a node structure has changed, the modelwalker ' \ 'will reset its walk through the children nodes', True, bool), + 'ign_sep': ('when set to True, non-terminal separators will be ignored ' \ + 'if any are defined.', False, bool), 'fix_all': ('for each produced data, reevaluate the constraints on the whole graph', False, bool), 'fix': ("limit constraints fixing to the nodes related to the currently fuzzed one" @@ -148,7 +150,8 @@ def set_seed(self, prev_data): min_runs_per_node=self.min_runs_per_node, fuzz_magnitude=self.fuzz_mag, fix_constraints=self.fix, - respect_order=self.order) + respect_order=self.order, + ignore_separator=self.ign_sep) self.consumer.need_reset_when_structure_change = self.deep self.consumer.set_node_interest(path_regexp=self.path) self.modelwalker = ModelWalker(prev_data.node, self.consumer, max_steps=self.max_steps, initial_step=self.init) @@ -176,7 +179,7 @@ def disrupt_data(self, dm, target, data): corrupt_node_bytes = consumed_node.to_bytes() - data.add_info('model walking index: {:d}'.format(idx)) + data.add_info('model walking index: {:d}'.format(idx)) data.add_info(' |_ run: {:d} / {:d} (max)'.format(self.run_num, self.max_runs)) data.add_info('current fuzzed node: {!s}'.format(self.modelwalker.consumed_node_path)) data.add_info(' |_ value type: {!s}'.format(consumed_node.cc.get_value_type())) @@ -297,7 +300,7 @@ def disrupt_data(self, dm, target, data): ' the disruptor should apply', None, str), 'order': ('when set to True, the fuzzing order is strictly guided ' \ 'by the data structure. Otherwise, fuzz weight (if specified ' \ - 'in the data model) is used for ordering', False, bool), + 'in the data model) is used for ordering', True, bool), 'deep': ('when set to True, if a node structure has changed, the modelwalker ' \ 'will reset its walk through the children nodes', True, bool)}) class sd_fuzz_separator_nodes(StatefulDisruptor): @@ -461,12 +464,13 @@ def set_seed(self, prev_data): self.minmax_cst_nodelist_1.remove((n, mini, maxi)) self.minmax_cst_nodelist_2 = copy.copy(self.minmax_cst_nodelist_1) + self.minmax_cst_nodelist_3 = copy.copy(self.minmax_cst_nodelist_1) else: - self.minmax_cst_nodelist_1 = self.minmax_cst_nodelist_2 = [] + self.minmax_cst_nodelist_1 = self.minmax_cst_nodelist_2 = self.minmax_cst_nodelist_3 = [] self.max_runs = len(self.exist_cst_nodelist) + 2*len(self.size_cst_nodelist_1) + \ - 2*len(self.qty_cst_nodelist_1) + 2*len(self.minmax_cst_nodelist_1) + 2*len(self.qty_cst_nodelist_1) + 3*len(self.minmax_cst_nodelist_1) def disrupt_data(self, dm, target, data): @@ -521,6 +525,13 @@ def disrupt_data(self, dm, target, data): self.seed.env.add_node_to_corrupt(consumed_node, corrupt_type=Node.CORRUPT_NODE_QTY, corrupt_op=lambda x, y: (new_maxi, new_maxi)) op_performed = "set node amount to its maximum plus one" + elif self.deep and self.minmax_cst_nodelist_3: + consumed_node, mini, maxi = self.minmax_cst_nodelist_3.pop() + if self.idx == step_idx: + new_maxi = (maxi*10) + self.seed.env.add_node_to_corrupt(consumed_node, corrupt_type=Node.CORRUPT_NODE_QTY, + corrupt_op=lambda x, y: (new_maxi, new_maxi)) + op_performed = "set node amount to a value way beyond its maximum" else: stop = True break diff --git a/framework/global_resources.py b/framework/global_resources.py index bff8076..d12d1b5 100644 --- a/framework/global_resources.py +++ 
b/framework/global_resources.py @@ -31,7 +31,7 @@ from libs.utils import ensure_dir, ensure_file -fuddly_version = '0.25.1' +fuddly_version = '0.25.2' framework_folder = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) # framework_folder = os.path.dirname(framework.__file__) diff --git a/framework/monitor.py b/framework/monitor.py index 8b423b9..4ac5a15 100644 --- a/framework/monitor.py +++ b/framework/monitor.py @@ -1026,9 +1026,12 @@ class ProbeMem(Probe): Attributes: backend (Backend): backend to be used (e.g., :class:`SSH_Backend`). process_name (str): name of the process to monitor. - threshold (int): memory (RSS) threshold in bytes that the monitored process should not exceed. + threshold (int): memory (RSS) threshold that the monitored process should not exceed. + (dimension should be the same as what is provided by the `ps` command of the system + under test) tolerance (int): tolerance expressed in percentage of the memory (RSS) the process was - using at the beginning of the monitoring. + using at the beginning of the monitoring (or after each time the tolerance has been + exceeded). command_pattern (str): format string for the ssh command. '{0:s}' refer to the process name. """ @@ -1043,6 +1046,7 @@ def __init__(self): assert self.backend != None self._saved_mem = None self._max_mem = None + self._last_status_ok = None Probe.__init__(self) def _get_mem(self): @@ -1068,6 +1072,7 @@ def start(self, dm, target, logger): self.backend.start() self._max_mem = None self._saved_mem = self._get_mem() + self._last_status_ok = True self.reset() if self._saved_mem < 0: msg = "*** INIT ERROR: unable to retrieve process RSS ***\n" @@ -1109,13 +1114,20 @@ def main(self, dm, target, logger): if not ok: status.set_status(-1) status.set_private_info(err_msg+'\n'+info) + self._last_status_ok = False else: status.set_status(self._max_mem) status.set_private_info(info) + self._last_status_ok = True return status def reset(self): - if self._max_mem is not None: + if self._max_mem is not None and not self._last_status_ok: + # In this case, the memory consumption exceeds the `tolerance` ratio or the `threshold`. + # We update saved_mem with what was witnessed to avoid triggering an issue + # continuously when the tolerance ratio has been exceeded. + # Thus, in order for the probe to trigger a new issue, the + # `tolerance` ratio should be exceeded again with the new saved_mem. 
self._saved_mem = self._max_mem self._max_mem = self._saved_mem diff --git a/framework/plumbing.py b/framework/plumbing.py index 97f6753..8c9e93a 100644 --- a/framework/plumbing.py +++ b/framework/plumbing.py @@ -2106,6 +2106,31 @@ def show_data(self, data, verbose=True): data.node.show(raw_limit=400) self.lg.print_console('\n\n', nl_before=False) + @EnforceOrder(accepted_states=['S2']) + def show_scenario(self, sc_name, fmt='pdf'): + generators_gen = self._generic_tactics.get_generators() + generators_spe = self._tactics.get_generators() + err_msg = "The scenario '{!s}' does not exist!".format(sc_name) + + if generators_gen and sc_name in generators_gen: + generators_list = self._generic_tactics.get_generators_list(sc_name) + tactics = self._generic_tactics + elif generators_spe and sc_name in generators_spe: + generators_list = self._tactics.get_generators_list(sc_name) + tactics = self._tactics + else: + self.set_error(err_msg, code=Error.FmkWarning) + return False + + if generators_list: + cls_name = list(generators_list.keys())[0] + sc_obj = tactics.get_generator_obj(sc_name, cls_name) + if sc_obj and isinstance(sc_obj, DynGeneratorFromScenario): + sc_obj.graph_scenario(fmt=fmt) + else: + self.set_error(err_msg, code=Error.FmkWarning) + else: + self.set_error(err_msg, code=Error.FmkWarning) @EnforceOrder(accepted_states=['S2']) def show_dm_data_identifiers(self): @@ -3179,7 +3204,6 @@ def show_generators(self, dmaker_type=None): self.lg.print_console(msg, limit_output=False) self.lg.print_console('\n', nl_before=False) - @EnforceOrder(accepted_states=['S2']) def show_disruptors(self, dmaker_type=None): @@ -4482,6 +4506,35 @@ def do_show_data(self, line): self.__error = False return False + def do_show_scenario(self, line): + ''' + Show a scenario in the specific format FMT (e.g., xdot, png, pdf, ...) + |_ syntax: show_scenario SCENARIO_NAME [FMT] + + FMT defaults to 'pdf' + ''' + + self.__error = True + self.__error_msg = "Syntax Error!" 
+ + args = line.split() + + if len(args) > 2 or len(args) < 1: + return False + + if len(args) == 2: + sc_name = args[0] + fmt = args[1] + else: + sc_name = args[0] + fmt = 'pdf' + + self.fz.show_scenario(sc_name=sc_name, fmt=fmt) + + self.__error = False + return False + + def do_replay_last(self, line): ''' Replay last data and optionnaly apply new disruptors on it diff --git a/framework/scenario.py b/framework/scenario.py index 06bbd88..7882610 100644 --- a/framework/scenario.py +++ b/framework/scenario.py @@ -25,6 +25,7 @@ from framework.global_resources import * from framework.data_model import Data +from libs.external_modules import * class DataProcess(object): def __init__(self, process, seed=None, auto_regen=False): @@ -87,6 +88,29 @@ def make_free(self): if self.outcomes is not None: self.outcomes.make_free() + def formatted_str(self, oneliner=False): + desc = '' + suffix = ', PROCESS: ' if oneliner else '\n' + if isinstance(self.seed, str): + desc += 'SEED: ' + self.seed + suffix + elif isinstance(self.seed, Data): + desc += 'SEED: Data(...)' + suffix + else: + desc += suffix[2:] + + for d in self.process: + if isinstance(d, (list, tuple)): + desc += '{!s} / '.format(d[0]) + else: + assert isinstance(d, str) + desc += '{!s} / '.format(d) + desc = desc[:-3] + + return desc + + def __repr__(self): + return self.formatted_str(oneliner=True) + def __copy__(self): new_datap = type(self)(self.process, seed=self.seed, auto_regen=self.auto_regen) new_datap._process = copy.copy(self._process) @@ -236,18 +260,18 @@ def get_data(self): d = Data('') if self._step_desc is None: if isinstance(self._data_desc, DataProcess): - step_desc = 'generate ' + repr(self._data_desc) + d.add_info(repr(self._data_desc)) elif isinstance(self._data_desc, Data): - step_desc = 'use provided Data(...)' + d.add_info('Use provided Data(...)') else: assert isinstance(self._data_desc, str) - d.add_info("instantiate a node '{:s}' from the model".format(self._node_name)) + d.add_info("Instantiate a node '{:s}' from the model".format(self._node_name)) if self._periodic_data is not None: p_sz = len(self._periodic_data) - d.add_info("set {:d} periodic{:s}".format(p_sz, 's' if p_sz > 1 else '')) + d.add_info("Set {:d} periodic{:s}".format(p_sz, 's' if p_sz > 1 else '')) if self._periodic_data_to_remove is not None: p_sz = len(self._periodic_data_to_remove) - d.add_info("clear {:d} periodic{:s}".format(p_sz, 's' if p_sz > 1 else '')) + d.add_info("Clear {:d} periodic{:s}".format(p_sz, 's' if p_sz > 1 else '')) else: d.add_info(self._step_desc) @@ -284,6 +308,25 @@ def periodic_to_clear(self): def set_transitions(self, transitions): self._transitions = transitions + def __str__(self): + if self._step_desc: + step_desc = self._step_desc + elif isinstance(self._data_desc, DataProcess): + step_desc = self._data_desc.formatted_str(oneliner=False) + elif isinstance(self._data_desc, Data): + if self.__class__.__name__ != 'Step': + step_desc = '[' + self.__class__.__name__ + ']' + else: + step_desc = 'Data(...)' + elif isinstance(self._data_desc, str): + step_desc = "{:s}".format(self._node_name.upper()) + else: + assert self._data_desc is None + step_desc = '[' + self.__class__.__name__ + ']' + + return step_desc + + def __hash__(self): return id(self) @@ -363,6 +406,14 @@ def run_callback(self, current_step, feedback=None, hook=HOOK.after_fbk): return go_on + def __str__(self): + desc = '' + for k, v in self._callbacks.items(): + desc += str(k) + '\n' + v.__name__ + '()\n' + desc = desc[:-1] + + return desc + def 
__hash__(self): return id(self) @@ -416,6 +467,47 @@ def periodic_to_clear(self): for pid in self._periodic_ids: yield pid + def graph(self, fmt='pdf'): + + def graph_creation(init_step, node_list, edge_list): + if init_step.final or init_step is self._anchor: + f.attr('node', fontcolor='white', shape='oval', style='rounded,filled', + fillcolor='black') + else: + f.attr('node', fontcolor='black', shape='oval', style='rounded,filled', + fillcolor='lightgrey') + f.node(str(id(init_step)), label=str(init_step)) + for idx, tr in enumerate(init_step.transitions): + if tr.step not in node_list: + if tr.step.final: + f.attr('node', fontcolor='white', shape='oval', style='rounded,filled', + fillcolor='black') + else: + f.attr('node', fontcolor='black', shape='oval', style='rounded,filled', + fillcolor='lightgrey') + f.node(str(id(tr.step)), label=str(tr.step)) + if id(tr) not in edge_list: + f.edge(str(id(init_step)), str(id(tr.step)), label='[{:d}] {!s}'.format(idx+1, tr)) + edge_list.append(id(tr)) + if tr.step in node_list: + continue + if tr.step not in node_list: + node_list.append(tr.step) + graph_creation(tr.step, node_list=node_list, edge_list=edge_list) + + if not graphviz_module: + print("\n*** ERROR: need python graphviz module to be installed ***") + return + + try: + f = graphviz.Digraph(self.name, format=fmt, + filename=os.path.join(workspace_folder, self.name+'.gv')) + except: + print("\n*** ERROR: Unknown format ({!s})! ***".format(fmt)) + else: + graph_creation(self._anchor, node_list=[], edge_list=[]) + f.view() + def __copy__(self): def graph_copy(init_step, dico): diff --git a/framework/tactics_helpers.py b/framework/tactics_helpers.py index b200683..884f34b 100644 --- a/framework/tactics_helpers.py +++ b/framework/tactics_helpers.py @@ -671,6 +671,9 @@ def produced_seed(self, val): # mechanism pass + def graph_scenario(self, fmt): + self.scenario.graph(fmt=fmt) + def setup(self, dm, user_input): if not _user_input_conformity(self, user_input, self._gen_args_desc, self._args_desc): return False diff --git a/framework/value_types.py b/framework/value_types.py index 004f734..38aba2b 100644 --- a/framework/value_types.py +++ b/framework/value_types.py @@ -903,24 +903,25 @@ def _enable_fuzz_mode(self, fuzz_magnitude=1.0): sz = len(orig_val) sz_delta_with_max = self.max_encoded_sz - sz - try: + if sz > 0: val = bp.corrupt_bits(orig_val, n=1) self.values_fuzzy.append(val) - except: - print("\n*** Value is empty! 
--> skipping bitflip test case ***") val = orig_val + b"A"*(sz_delta_with_max + 1) self.values_fuzzy.append(val) - self.values_fuzzy.append(b'') + if len(self.encode(orig_val)) > 0: + self.values_fuzzy.append(b'') + if sz > 0: sz_delta_with_min = sz - self.min_sz val = orig_val[:-sz_delta_with_min-1] if val != b'': self.values_fuzzy.append(val) - val = orig_val + b"X"*(self.max_sz*int(100*fuzz_magnitude)) - self.values_fuzzy.append(val) + if self.max_sz > 0: + val = orig_val + b"X"*(self.max_sz*int(100*fuzz_magnitude)) + self.values_fuzzy.append(val) self.values_fuzzy.append(b'\x00' * sz if sz > 0 else b'\x00') diff --git a/libs/external_modules.py b/libs/external_modules.py index 751dcf0..e536fe2 100644 --- a/libs/external_modules.py +++ b/libs/external_modules.py @@ -22,7 +22,9 @@ ################################################################################ try: + import xtermcolor from xtermcolor import colorize + xtermcolor.isatty = lambda x: True except ImportError: print("WARNING [FMK]: python-xtermcolor module is not installed, colors won't be available!") def colorize(string, rgb=None, ansi=None, bg=None, ansi_bg=None, fd=1): @@ -86,13 +88,19 @@ class FontStyle: UNDERLINE = '\033[4m' END = '\033[0m' +graphviz_module = True +try: + import graphviz +except ImportError: + graphviz_module = False + print('WARNING [FMK]: python(3)-graphviz module is not installed, Scenario could not be visualized!') sqlite3_module = True try: import sqlite3 except ImportError: sqlite3_module = False - print('WARNING [FMK]: SQLite3 not installed, FMKDB will not be available!') + print('WARNING [FMK]: SQLite3 not installed, FmkDB will not be available!') cups_module = True try: diff --git a/test/integration/test_integration.py b/test/integration/test_integration.py index 9f3b81f..8c1e475 100644 --- a/test/integration/test_integration.py +++ b/test/integration/test_integration.py @@ -3338,6 +3338,34 @@ def test_regex(self, regex_node_name): self.assertEqual(node_regex.to_bytes(), node_classic.to_bytes()) + @ddt.data(('(HTTP)/[0-9]\.[0-9]|this|is|it[0123456789]', [5, 1, 2]), + ('this|.is|it|[0123456789]', [1, 2, 1, 1]), + ('|this|is|it[0123456789]|\dyes\-', [1, 2, 2])) + @ddt.unpack + def test_regex_shape(self, regexp, shapes): + revisited_HTTP_version = {'name': 'HTTP_version_classic', 'contents': regexp} + + mh = ModelHelper() + node = mh.create_graph_from_desc(revisited_HTTP_version) + + excluded_idx = [] + + while True: + node_list, idx = node.cc._get_next_heavier_component(node.subnodes_csts, excluded_idx=excluded_idx) + if len(node_list) == 0: + break + excluded_idx.append(idx) + print(node_list) + try: + idx = shapes.index(len(node_list[0][1])) + except ValueError: + print(len(node_list[0][1])) + self.fail() + else: + del shapes[idx] + + self.assertEqual(len(shapes), 0) + class TestFMK(unittest.TestCase): @classmethod @@ -3503,7 +3531,7 @@ def test_scenario_infra(self): self.assertEqual(code_vector, ['DataUnusable', 'HandOver', 'DataUnusable', 'HandOver', 'DPHandOver', 'NoMoreData']) - self.assertEqual(base_qty, 53) + self.assertEqual(base_qty, 55) print('\n*** test scenario SC_AUTO_REGEN') diff --git a/test/unit/test_data_model_helpers.py b/test/unit/test_data_model_helpers.py index 8d9a3cf..ee42e0d 100644 --- a/test/unit/test_data_model_helpers.py +++ b/test/unit/test_data_model_helpers.py @@ -26,13 +26,48 @@ def tearDown(self): {'regex': "(salut))les(louloux)"}, {'regex': "(sal*ut)oo"}, {'regex': "(sal?ut)oo"}, {'regex': "sal{utoo"}, {'regex': "(sal+ut)oo"}, {'regex': "(sal{u)too"}, {'regex': 
"(sal{2}u)too"}, {'regex': "sal{2,1}utoo"}, {'regex': "sal(u[t]o)o"}, - {'regex': "whatever|toto?ff"}, {'regex': "whate?ver|toto"}, {'regex': "(toto)*ohoho|haha"}, - {'regex': "(toto)ohoho|haha"}, {'regex': "salut[abcd]{,15}rr"}, {'regex': "[]whatever"}, - {'regex': "t{,15}"}, {'regex': "hi|b?whatever"}, {'regex': "hi|b{3}whatever"}, - {'regex': "whatever(bar.foo)"}) + {'regex': "salut[abcd]{,15}rr"}, {'regex': "[]whatever"}, + {'regex': "t{,15}"}, {'regex': "whatever(bar.foo)"}) def test_invalid_regexes(self, regex): self.assert_regex_is_invalid(regex) + @ddt.data( + {'regex': "whatever|toto?ff", + 'nodes': [ + {'values': ['whatever']}, + {'values': ['tot']}, + {'values': ['o'], 'qty': (0, 1)}, + {'values': ['ff']}]}, + {'regex': "whate?ver|toto", + 'nodes': [ + {'values': ['what']}, + {'values': ['e'], 'qty': (0, 1)}, + {'values': ['ver']}, + {'values': ['toto']}]}, + {'regex': "(toto)*ohoho|haha", + 'nodes': [ + {'values': ['toto'], 'qty':(0, None)}, + {'values': ['ohoho']}, + {'values': ['haha']}]}, + {'regex': "(toto)ohoho|haha", + 'nodes': [ + {'values': ['toto']}, + {'values': ['ohoho']}, + {'values': ['haha']}]}, + {'regex': "hi|b?whatever", + 'nodes': [ + {'values': ['hi']}, + {'values': ['b'], 'qty': (0, 1)}, + {'values': ['whatever']}]}, + {'regex': "hi|b{3}whatever", + 'nodes': [ + {'values': ['hi']}, + {'values': ['b'], 'qty': (3, 3)}, + {'values': ['whatever']}]}, + ) + def test_shapes(self, test_case): + self.assert_regex_is_valid(test_case) + @ddt.data( {'regex': ".", 'nodes': [{"alphabet": ASCII_EXT}]}, {'regex': "this.is", @@ -93,6 +128,11 @@ def test_quantifiers(self, test_case): {"values": [u"foo-bar"]}, {"alphabet": "0123456789"}, {"alphabet": "th|is"}]}, + {'regex': u"(333|444)|foo-bar|\||[th|is]", + 'nodes': [ + {"type": fvt.INT_str, "values": [333, 444]}, + {"values": [u"foo-bar", "|"]}, + {"alphabet": "th|is"}]}, ) def test_escape(self, test_case): @@ -316,7 +356,7 @@ def assert_regex_is_valid(self, test_case): charset = test_case['charset'] if 'charset' in test_case else MH.Charset.ASCII_EXT self._parser.parse(test_case['regex'], "name", charset) - self.assertEquals(self._parser._create_terminal_node.call_count, len(test_case['nodes'])) + calls = [] nodes = test_case['nodes'] @@ -331,6 +371,8 @@ def assert_regex_is_valid(self, test_case): self._parser._create_terminal_node.assert_has_calls(calls) + self.assertEquals(self._parser._create_terminal_node.call_count, len(test_case['nodes'])) + def assert_regex_is_invalid(self, test_case): charset = test_case['charset'] if 'charset' in test_case else MH.Charset.ASCII_EXT diff --git a/tools/fmkdb.py b/tools/fmkdb.py index 2cdb9b5..0371558 100755 --- a/tools/fmkdb.py +++ b/tools/fmkdb.py @@ -74,8 +74,10 @@ group.add_argument('--with-data', action='store_true', help='Display data content (expect --info)') group.add_argument('--without-fmkinfo', action='store_true', help='Do not display fmkinfo (expect --info)') -group.add_argument('--limit', type=int, default=600, - help='Limit the size of what is displayed from data (expect --with-data)') +group.add_argument('--limit', type=int, default=None, + help='Limit the size of what is displayed from the sent data and the ' + 'retrieved feedback (expect --with-data or --with-fbk).') +group.add_argument('--raw', action='store_true', help='Display data and feedback in raw format') group = parser.add_argument_group('Fuddly Database Operations') group.add_argument('--export-data', nargs=2, metavar=('FIRST_DATA_ID','LAST_DATA_ID'), type=int, @@ -155,6 +157,7 @@ def colorize(string, 
rgb=None, ansi=None, bg=None, ansi_bg=None, fd=1): with_data = args.with_data without_fmkinfo = args.without_fmkinfo limit_data_sz = args.limit + raw_data = args.raw export_data = args.export_data export_one_data = args.export_one_data @@ -181,7 +184,7 @@ def colorize(string, rgb=None, ansi=None, bg=None, ansi_bg=None, fd=1): fmkdb.display_data_info(data_info, with_data=with_data, with_fbk=with_fbk, with_fmkinfo=not without_fmkinfo, fbk_src=fbk_src, - limit_data_sz=limit_data_sz, page_width=page_width, + limit_data_sz=limit_data_sz, raw=raw_data, page_width=page_width, colorized=colorized) elif data_info_by_date is not None: @@ -192,7 +195,7 @@ def colorize(string, rgb=None, ansi=None, bg=None, ansi_bg=None, fd=1): fmkdb.display_data_info_by_date(start, end, with_data=with_data, with_fbk=with_fbk, with_fmkinfo=not without_fmkinfo, fbk_src=fbk_src, prj_name=prj_name, - limit_data_sz=limit_data_sz, page_width=page_width, + limit_data_sz=limit_data_sz, raw=raw_data, page_width=page_width, colorized=colorized) elif data_info_by_range is not None: @@ -203,7 +206,7 @@ def colorize(string, rgb=None, ansi=None, bg=None, ansi_bg=None, fd=1): fmkdb.display_data_info_by_range(first_id, last_id, with_data=with_data, with_fbk=with_fbk, with_fmkinfo=not without_fmkinfo, fbk_src=fbk_src, prj_name=prj_name, - limit_data_sz=limit_data_sz, page_width=page_width, + limit_data_sz=limit_data_sz, raw=raw_data, page_width=page_width, colorized=colorized) elif export_data is not None or export_one_data is not None:
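The scenario visualization introduced above (Scenario.graph() in framework/scenario.py, exposed through the new show_scenario command in framework/plumbing.py) boils down to walking the scenario from its anchor step and emitting one graphviz node per step and one labelled edge per transition. The standalone sketch below illustrates that rendering pattern with the python graphviz package; the step labels and callback names are borrowed from the PPPoE scenario above, but the code does not use fuddly itself and is only an approximation of what graph_creation() does.

    # Minimal sketch of the rendering pattern used by Scenario.graph(): one oval
    # node per step, one labelled edge per transition. Requires the python
    # 'graphviz' package and the Graphviz binaries (dot).
    import graphviz

    steps = ['Wait PADI', 'Send valid PADO', 'Send fuzzed PADS']
    transitions = [(0, 1, 'retrieve_padi_from_feedback()'),
                   (1, 2, 'retrieve_padr_from_feedback_and_update()')]

    f = graphviz.Digraph('sc_example', format='pdf', filename='sc_example.gv')
    f.attr('node', fontcolor='black', shape='oval', style='rounded,filled',
           fillcolor='lightgrey')
    for idx, label in enumerate(steps):
        f.node(str(idx), label=label)
    for src, dst, cbk in transitions:
        f.edge(str(src), str(dst), label=cbk)
    f.view()  # writes sc_example.gv and opens the rendered sc_example.gv.pdf

Within fuddly, the equivalent is the shell command documented in docs/source/scenario.rst: show_scenario SCENARIO_NAME [FMT], with FMT defaulting to 'pdf'.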
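On the fmkdb side, sent-data and feedback rendering is now funnelled through Database._handle_binary_content(): content is escaped to an ASCII repr unless --raw is passed, and it is truncated only when a size limit is explicitly given (the --limit default moved from 600 to None, i.e. no truncation by default). A condensed, standalone sketch of that rule follows (hypothetical function name, Python 3 branch only):

    # Hypothetical standalone equivalent of the formatting rule applied by
    # Database._handle_binary_content() to sent data and feedback entries.
    def format_content(content, sz_limit=None, raw=False):
        # 'content' is assumed to be already decoded from the DB internal representation
        out = content if raw else '{!a}'.format(content)  # ascii()-style escaping (Python 3 branch)
        if sz_limit is not None and len(out) > sz_limit:
            out = out[:sz_limit] + ' ...'  # the real method colorizes the ellipsis
        return out

    print(format_content('PADO\x00\xff payload', sz_limit=16))  # escaped, then cut with a trailing ' ...'
    print(format_content('PADO\x00\xff payload', raw=True))     # printed as-is, no escaping, no limit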
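The ProbeMem change is about when the reference RSS is rebased: _max_mem tracks the peak observed since the last reset, and the baseline _saved_mem is only moved up to that peak after a failed check (threshold or tolerance exceeded), so a process that settles at a new, higher memory level is reported once instead of on every subsequent data sending. A condensed sketch of that bookkeeping (hypothetical class; the actual measurement over the backend/ps command is left out):

    # Hypothetical condensation of ProbeMem's new bookkeeping: the baseline is
    # rebased on the witnessed peak only after a check that was not OK.
    class RssBaseline:
        def __init__(self, initial_rss):
            self.saved_mem = initial_rss   # baseline (RSS at start of monitoring)
            self.max_mem = initial_rss     # peak seen since the last reset
            self.last_status_ok = True

        def record(self, rss, ok):
            self.max_mem = max(self.max_mem, rss)
            self.last_status_ok = ok

        def reset(self):
            if not self.last_status_ok:
                # mirror of ProbeMem.reset(): adopt the peak as the new baseline so the
                # same stable overshoot does not retrigger the probe continuously
                self.saved_mem = self.max_mem
            self.max_mem = self.saved_mem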