diff --git a/coverage/README.md b/coverage/README.md
index 02eb4fa3..22fe6fb5 100644
--- a/coverage/README.md
+++ b/coverage/README.md
@@ -16,7 +16,7 @@ Example usage:
 
 ## Intel Pin
 
-Using a [custom pintool](coverage/pin) contributed by [Agustin Gianni](https://twitter.com/agustingianni), the Intel Pin DBI can also be used to collect coverage data.
+Using a [custom pintool](pin/README.md) contributed by [Agustin Gianni](https://twitter.com/agustingianni), the Intel Pin DBI can also be used to collect coverage data.
 
 Example usage:
 
@@ -24,11 +24,11 @@ Example usage:
 pin.exe -t CodeCoverage64.dll -- boombox.exe
 ```
 
-For convenience, binaries for the Windows pintool can be found on the [releases](https://github.com/gaasedelen/lighthouse/releases) page. macOS and Linux users need to compile the pintool themselves following the [instructions](coverage/pin#compilation) included with the pintool for their respective platforms.
+For convenience, binaries for the Windows pintool can be found on the [releases](https://github.com/gaasedelen/lighthouse/releases) page. macOS and Linux users need to compile the pintool themselves following the [instructions](pin/README.md#compilation) included with the pintool for their respective platforms.
 
 ## Frida (Experimental)
 
-Lighthouse offers limited support for Frida based code coverage via a custom [instrumentation script](coverage/frida) contributed by [yrp](https://twitter.com/yrp604).
+Lighthouse offers limited support for Frida based code coverage via a custom [instrumentation script](frida/README.md) contributed by [yrp](https://twitter.com/yrp604).
 
 Example usage:
 
diff --git a/coverage/pin/CodeCoverage.cpp b/coverage/pin/CodeCoverage.cpp
index 8136f44a..4407f403 100644
--- a/coverage/pin/CodeCoverage.cpp
+++ b/coverage/pin/CodeCoverage.cpp
@@ -46,12 +46,9 @@ static std::string base_name(const std::string& path)
 }
 
 // Per thread data structure. This is mainly done to avoid locking.
+// - Per-thread map of executed basic blocks, and their size.
 struct ThreadData {
-    // Unique list of hit basic blocks.
-    pintool::unordered_set<ADDRINT> m_block_hit;
-
-    // Map basic a block address to its size.
-    pintool::unordered_map<ADDRINT, uint16_t> m_block_size;
+    pintool::unordered_map<ADDRINT, uint16_t> m_blocks;
 };
 
 class ToolContext {
@@ -166,24 +163,37 @@ static VOID PIN_FAST_ANALYSIS_CALL OnBasicBlockHit(THREADID tid, ADDRINT addr, U
 {
     auto& context = *reinterpret_cast<ToolContext*>(v);
     ThreadData* data = context.GetThreadLocalData(tid);
-    data->m_block_hit.insert(addr);
-    data->m_block_size[addr] = size;
+    data->m_blocks[addr] = size;
+    PIN_RemoveInstrumentationInRange(addr, addr);
 }
 
 // Trace hit event handler.
 static VOID OnTrace(TRACE trace, VOID* v)
 {
     auto& context = *reinterpret_cast<ToolContext*>(v);
-    BBL bbl = TRACE_BblHead(trace);
-    ADDRINT addr = BBL_Address(bbl);
 
     // Check if the address is inside a white-listed image.
-    if (!context.m_tracing_enabled || !context.m_images->isInterestingAddress(addr))
+    if (!context.m_tracing_enabled || !context.m_images->isInterestingAddress(TRACE_Address(trace)))
         return;
 
-    // For each basic block in the trace.
-    for (; BBL_Valid(bbl); bbl = BBL_Next(bbl)) {
-        addr = BBL_Address(bbl);
+    auto tid = PIN_ThreadId();
+    ThreadData* data = context.GetThreadLocalData(tid);
+
+    // This trace is getting JIT'd, which implies the head must get executed.
+    auto bbl = TRACE_BblHead(trace);
+    auto addr = BBL_Address(bbl);
+    data->m_blocks[addr] = (uint16_t)BBL_Size(bbl);
+
+    // For each basic block in the trace...
+    for (bbl = BBL_Next(bbl); BBL_Valid(bbl); bbl = BBL_Next(bbl))
+    {
+
+        // Ignore blocks that have already been marked as executed in the past...
+        ADDRINT addr = BBL_Address(bbl);
+        if (data->m_blocks.find(addr) != data->m_blocks.end())
+            continue;
+
+        // Instrument blocks that have not yet been executed (at least... by this thread).
         BBL_InsertCall(bbl, IPOINT_ANYWHERE,
             (AFUNPTR)OnBasicBlockHit,
             IARG_FAST_ANALYSIS_CALL,
             IARG_THREAD_ID,
@@ -192,6 +202,7 @@ static VOID OnTrace(TRACE trace, VOID* v)
             IARG_PTR, v,
             IARG_END);
     }
+    }
 }
 
 // Program finish event handler.
@@ -219,7 +230,7 @@ static VOID OnFini(INT32 code, VOID* v)
     // Count the global number of basic blocks.
     size_t number_of_bbs = 0;
     for (const auto& data : context.m_terminated_threads) {
-        number_of_bbs += data->m_block_hit.size();
+        number_of_bbs += data->m_blocks.size();
     }
 
     context.m_trace->write_string("BB Table: %u bbs\n", number_of_bbs);
@@ -233,7 +244,8 @@ static VOID OnFini(INT32 code, VOID* v)
 
     drcov_bb tmp;
     for (const auto& data : context.m_terminated_threads) {
-        for (const auto& address : data->m_block_hit) {
+        for (const auto& block : data->m_blocks) {
+            auto address = block.first;
             auto it = std::find_if(context.m_loaded_images.begin(), context.m_loaded_images.end(),
                 [&address](const LoadedImage& image) { return address >= image.low_ && address < image.high_; });
 
@@ -243,7 +255,7 @@ static VOID OnFini(INT32 code, VOID* v)
 
             tmp.id = (uint16_t)std::distance(context.m_loaded_images.begin(), it);
             tmp.start = (uint32_t)(address - it->low_);
-            tmp.size = data->m_block_size[address];
+            tmp.size = data->m_blocks[address];
 
             context.m_trace->write_binary(&tmp, sizeof(tmp));
         }
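The pintool changes above still funnel everything into the same drcov-style output: a module table, a `BB Table: %u bbs` header, and one packed record per executed basic block (`tmp.start`, `tmp.size`, `tmp.id`). As a point of reference, here is a minimal, hedged sketch of how such packed records can be decoded in Python; the `<IHH` field order (`start`, `size`, `mod_id`) follows the common drcov layout and is an assumption about the on-disk order, not something spelled out in this patch.

```python
import struct

# drcov stores each executed basic block as a packed 8-byte record:
#   uint32 start   - offset of the block from its module base
#   uint16 size    - size of the basic block in bytes
#   uint16 mod_id  - index into the module table
DRCOV_BB = struct.Struct("<IHH")

def parse_bb_table(data):
    """Decode packed basic block records from a drcov 'BB Table' blob."""
    blocks = []
    usable = len(data) - (len(data) % DRCOV_BB.size)
    for offset in range(0, usable, DRCOV_BB.size):
        start, size, mod_id = DRCOV_BB.unpack_from(data, offset)
        blocks.append((mod_id, start, size))
    return blocks

if __name__ == "__main__":
    # two fake records, as the pintool would emit via write_binary(&tmp, sizeof(tmp))
    blob = DRCOV_BB.pack(0x1000, 32, 0) + DRCOV_BB.pack(0x2040, 12, 1)
    print(parse_bb_table(blob))   # [(0, 4096, 32), (1, 8256, 12)]
```

Keeping block sizes alongside addresses in the single `m_blocks` map is what lets `OnFini` emit these records in one pass.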
diff --git a/plugin/lighthouse/__init__.py b/plugins/lighthouse/__init__.py
similarity index 100%
rename from plugin/lighthouse/__init__.py
rename to plugins/lighthouse/__init__.py
diff --git a/plugin/lighthouse/composer/__init__.py b/plugins/lighthouse/composer/__init__.py
similarity index 100%
rename from plugin/lighthouse/composer/__init__.py
rename to plugins/lighthouse/composer/__init__.py
diff --git a/plugin/lighthouse/composer/parser.py b/plugins/lighthouse/composer/parser.py
similarity index 100%
rename from plugin/lighthouse/composer/parser.py
rename to plugins/lighthouse/composer/parser.py
diff --git a/plugin/lighthouse/composer/shell.py b/plugins/lighthouse/composer/shell.py
similarity index 100%
rename from plugin/lighthouse/composer/shell.py
rename to plugins/lighthouse/composer/shell.py
diff --git a/plugin/lighthouse/context.py b/plugins/lighthouse/context.py
similarity index 100%
rename from plugin/lighthouse/context.py
rename to plugins/lighthouse/context.py
diff --git a/plugin/lighthouse/coverage.py b/plugins/lighthouse/coverage.py
similarity index 100%
rename from plugin/lighthouse/coverage.py
rename to plugins/lighthouse/coverage.py
diff --git a/plugin/lighthouse/director.py b/plugins/lighthouse/director.py
similarity index 94%
rename from plugin/lighthouse/director.py
rename to plugins/lighthouse/director.py
index 7a1de3f9..baddde38 100644
--- a/plugin/lighthouse/director.py
+++ b/plugins/lighthouse/director.py
@@ -67,6 +67,7 @@ def __init__(self, metadata, palette):
         # the coverage file parser
         self.reader = CoverageReader()
         self._target_whitelist = []
+        self.suppressed_errors = set()
 
         # the name of the active coverage
         self.coverage_name = NEW_COMPOSITION
@@ -380,6 +381,9 @@ def load_coverage_batch(self, filepaths, batch_name, progress_callback=logger.de
         errors = collections.defaultdict(list)
         aggregate_addresses = set()
 
+        # unsuppress NO_COVERAGE_ERROR per-load, instead of per-session
+        self.suppressed_errors.discard(CoverageMissingError)
+
         start = time.time()
 
         #----------------------------------------------------------------------
@@ -439,6 +443,9 @@ def load_coverage_files(self, filepaths, progress_callback=logger.debug):
         errors = collections.defaultdict(list)
         all_coverage = []
 
+        # unsuppress NO_COVERAGE_ERROR per-load, instead of per-session
+        self.suppressed_errors.discard(CoverageMissingError)
+
         start = time.time()
 
         #----------------------------------------------------------------------
@@ -541,16 +548,39 @@ def _extract_coverage_data(self, coverage_file):
         if not module_name and coverage_file.modules:
 
             #
-            # if the user closes the dialog without selecting a name, there's
-            # nothing we can do for them ...
+            # earlier in this load, the user opted to ignore future attempts
+            # to alias or select coverage data. this is useful when trying to
+            # load a batch of coverage files, where some coverage files
+            # contain data, but none relevant to this database.
+            #
+
+            if CoverageMissingError in self.suppressed_errors:
+                return []
+
+            #
+            # show the module selection dialog to the user, and wait for them
+            # to select something, or close the dialog
             #
 
             dialog = ModuleSelector(database_target, coverage_file.modules, coverage_file.filepath)
-            if not dialog.exec_():
-                return [] # no coverage data extracted ...
+            result = dialog.exec_()
+
+            # check if the user opted to ignore future warnings for missing coverage
+            if dialog.ignore_missing:
+                self.suppressed_errors.add(CoverageMissingError)
+
+            #
+            # if the user closed the dialog without selecting a name, there's
+            # nothing we can do for them. return an empty set of coverage data
+            #
+
+            if not result:
+                return []
 
             # the user selected a module name! use that to extract coverage
             module_name = dialog.selected_name
+
+            # the user opted to save the selected name as an 'alias'
             if dialog.remember_alias:
                 self._target_whitelist.append(module_name)
 
@@ -713,13 +743,49 @@ def _find_fuzzy_name(self, coverage_file, target_name):
         """
         target_name = target_name.lower()
 
+        #
+        # 0. Pre-process module names, strip filepath if present
+        #
+
+        clean_module_names = {}
+        for module_name_raw in coverage_file.modules:
+
+            # trim 'path' from a 'module name' entry... if present (uncommon)
+            module_name = os.path.basename(module_name_raw)
+
+            #
+            # if this triggers, it's probably because the coverage file is
+            # using full filepaths for 'module names', and that there were
+            # two unique filepaths with the same module name, eg:
+            #
+            # - C:\foo.dll
+            # - C:\bar\foo.dll
+            #
+            # this should be super rare, but we'll just revert to using the
+            # full / unprocessed paths and bail...
+            #
+
+            if module_name in clean_module_names:
+                clean_module_names = {name: name for name in coverage_file.modules}
+                break
+
+            clean_module_names[module_name] = module_name_raw
+
         #
         # 1. exact, case-insensitive filename matching
         #
 
-        for module_name in coverage_file.modules:
+        for module_name in clean_module_names:
+            if target_name == module_name.lower():
+                return clean_module_names[module_name]
+
+        #
+        # 2. exact, case-insensitive filename matching
+        #
+
+        for module_name in clean_module_names:
             if target_name == module_name.lower():
-                return module_name
+                return clean_module_names[module_name]
 
         #
         # 2. cleave the extension from the target module name (the source)
@@ -727,9 +793,9 @@ def _find_fuzzy_name(self, coverage_file, target_name):
         #
 
         target_name, extension = os.path.splitext(target_name)
-        for module_name in coverage_file.modules:
+        for module_name in clean_module_names:
             if target_name == module_name.lower():
-                return module_name
+                return clean_module_names[module_name]
 
         # too risky to do fuzzy matching on short names...
         if len(target_name) < 6:
@@ -737,13 +803,13 @@ def _find_fuzzy_name(self, coverage_file, target_name):
 
         #
         # 3. try to match *{target_name}*{extension} in module_name, assuming
-        # target_name is more than 6 characters and there is no othe ambiguity
+        # target_name is more than 6 characters and there is no other ambiguity
         #
 
         possible_names = []
-        for module_name in coverage_file.modules:
+        for module_name in clean_module_names:
             if target_name in module_name.lower() and extension in module_name.lower():
-                possible_names.append(module_name)
+                possible_names.append(clean_module_names[module_name])
 
         # there were no matches on the wildcarding, so we're done
         if not possible_names:
diff --git a/plugin/lighthouse/exceptions.py b/plugins/lighthouse/exceptions.py
similarity index 97%
rename from plugin/lighthouse/exceptions.py
rename to plugins/lighthouse/exceptions.py
index eb35490a..e87cbe2f 100644
--- a/plugin/lighthouse/exceptions.py
+++ b/plugins/lighthouse/exceptions.py
@@ -112,7 +112,7 @@ def __init__(self, coverage):
 # UI Warnings
 #------------------------------------------------------------------------------
 
-def warn_errors(errors):
+def warn_errors(errors, ignore=[]):
     """
     Warn the user of any encountered errors with a messagebox.
     """
@@ -131,6 +131,10 @@ def warn_errors(errors):
         for error in error_list:
             lmsg(" - %s" % error.filepath)
 
+        # suppress popups for certain errors, if the user has specified such
+        if error_type in ignore:
+            continue
+
         #
         # popup a more verbose error messagebox for the user to read regarding
         # this class of error they encountered
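To make the new `ignore` parameter of `warn_errors()` above a bit more concrete, here is a hedged, self-contained sketch of the intended flow; the stripped-down error class and `print`-based reporting are stand-ins for Lighthouse's real exception types and `lmsg` logging, and the filenames are hypothetical.

```python
import collections

class CoverageMissingError(Exception):
    """Stand-in for lighthouse.exceptions.CoverageMissingError."""
    def __init__(self, filepath):
        self.filepath = filepath

def warn_errors(errors, ignore=()):
    """Report collected load errors, skipping any suppressed error classes."""
    for error_type, error_list in errors.items():
        if error_type in ignore:
            continue  # the user opted to suppress popups for this error class
        for error in error_list:
            print("Failed to map coverage: %s" % error.filepath)

# errors are grouped by type during a load, just like the director does
errors = collections.defaultdict(list)
errors[CoverageMissingError].append(CoverageMissingError("trace_01.log"))

suppressed = {CoverageMissingError}   # e.g. populated when the user ticks the new checkbox
warn_errors(errors, suppressed)       # prints nothing, the class is suppressed
warn_errors(errors)                   # prints the warning
```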
diff --git a/plugin/lighthouse/integration/__init__.py b/plugins/lighthouse/integration/__init__.py
similarity index 100%
rename from plugin/lighthouse/integration/__init__.py
rename to plugins/lighthouse/integration/__init__.py
diff --git a/plugin/lighthouse/integration/binja_integration.py b/plugins/lighthouse/integration/binja_integration.py
similarity index 100%
rename from plugin/lighthouse/integration/binja_integration.py
rename to plugins/lighthouse/integration/binja_integration.py
diff --git a/plugin/lighthouse/integration/binja_loader.py b/plugins/lighthouse/integration/binja_loader.py
similarity index 100%
rename from plugin/lighthouse/integration/binja_loader.py
rename to plugins/lighthouse/integration/binja_loader.py
diff --git a/plugin/lighthouse/integration/core.py b/plugins/lighthouse/integration/core.py
similarity index 98%
rename from plugin/lighthouse/integration/core.py
rename to plugins/lighthouse/integration/core.py
index e7f33574..aec06213 100644
--- a/plugin/lighthouse/integration/core.py
+++ b/plugins/lighthouse/integration/core.py
@@ -26,7 +26,7 @@ class LighthouseCore(object):
     # Plugin Metadata
     #--------------------------------------------------------------------------
 
-    PLUGIN_VERSION = "0.9.0"
+    PLUGIN_VERSION = "0.9.1"
     AUTHORS = "Markus Gaasedelen"
     DATE = "2020"
 
@@ -329,7 +329,7 @@ def interactive_load_batch(self, dctx=None):
         self.open_coverage_overview(lctx.dctx)
 
         # finally, emit any notable issues that occurred during load
-        warn_errors(errors)
+        warn_errors(errors, lctx.director.suppressed_errors)
 
     def interactive_load_file(self, dctx=None):
         """
@@ -399,7 +399,7 @@ def interactive_load_file(self, dctx=None):
         self.open_coverage_overview(lctx.dctx)
 
         # finally, emit any notable issues that occurred during load
-        warn_errors(errors)
+        warn_errors(errors, lctx.director.suppressed_errors)
 
     def check_for_update(self):
         """
diff --git a/plugin/lighthouse/integration/ida_integration.py b/plugins/lighthouse/integration/ida_integration.py
similarity index 100%
rename from plugin/lighthouse/integration/ida_integration.py
rename to plugins/lighthouse/integration/ida_integration.py
diff --git a/plugin/lighthouse/integration/ida_loader.py b/plugins/lighthouse/integration/ida_loader.py
similarity index 100%
rename from plugin/lighthouse/integration/ida_loader.py
rename to plugins/lighthouse/integration/ida_loader.py
diff --git a/plugin/lighthouse/metadata.py b/plugins/lighthouse/metadata.py
similarity index 99%
rename from plugin/lighthouse/metadata.py
rename to plugins/lighthouse/metadata.py
index 0a2c9d98..23f974d6 100644
--- a/plugin/lighthouse/metadata.py
+++ b/plugins/lighthouse/metadata.py
@@ -959,6 +959,14 @@ def _compute_complexity(self):
 
             # update the map of confirmed (walked) edges
             confirmed_edges[current_src] = self.edges.pop(current_src)
+
+        #
+        # retain only the 'confirmed' edges. this may differ from the
+        # original edge map because we are only keeping edges that can be
+        # walked from the function entry. (eg, no ida exception handlers)
+        #
+
+        self.edges = confirmed_edges
 
         # compute the final cyclomatic complexity for the function
         num_edges = sum(len(x) for x in itervalues(confirmed_edges))
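For readers unfamiliar with the complexity computation that the `metadata.py` hunk above feeds into, the sketch below shows the standard cyclomatic complexity formula (edges - nodes + 2) evaluated over a "confirmed" edge map; the tiny graph and helper function are illustrative only and are not part of Lighthouse.

```python
def cyclomatic_complexity(confirmed_edges, node_addresses):
    """
    Classic cyclomatic complexity, M = E - N + 2, computed over only the
    edges reachable ('confirmed') from the function entry, mirroring how
    the hunk above retains them via self.edges = confirmed_edges.
    """
    num_edges = sum(len(dsts) for dsts in confirmed_edges.values())
    num_nodes = len(node_addresses)
    return num_edges - num_nodes + 2

# a tiny made-up CFG: entry -> (a, b), a -> exit, b -> exit
edges = {0x1000: [0x1010, 0x1020], 0x1010: [0x1030], 0x1020: [0x1030]}
nodes = [0x1000, 0x1010, 0x1020, 0x1030]
print(cyclomatic_complexity(edges, nodes))   # 2
```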
diff --git a/plugin/lighthouse/painting/__init__.py b/plugins/lighthouse/painting/__init__.py
similarity index 100%
rename from plugin/lighthouse/painting/__init__.py
rename to plugins/lighthouse/painting/__init__.py
diff --git a/plugin/lighthouse/painting/binja_painter.py b/plugins/lighthouse/painting/binja_painter.py
similarity index 100%
rename from plugin/lighthouse/painting/binja_painter.py
rename to plugins/lighthouse/painting/binja_painter.py
diff --git a/plugin/lighthouse/painting/ida_painter.py b/plugins/lighthouse/painting/ida_painter.py
similarity index 86%
rename from plugin/lighthouse/painting/ida_painter.py
rename to plugins/lighthouse/painting/ida_painter.py
index 3301e3ea..e066b8b8 100644
--- a/plugin/lighthouse/painting/ida_painter.py
+++ b/plugins/lighthouse/painting/ida_painter.py
@@ -9,7 +9,7 @@
 from lighthouse.util import *
 from lighthouse.util.disassembler import disassembler
-from lighthouse.util.disassembler.ida_api import map_line2citem, map_line2node, lex_citem_indexes
+from lighthouse.util.disassembler.ida_api import map_line2citem, map_line2node, lex_citem_indexes, hexrays_available
 from lighthouse.painting import DatabasePainter
 
 logger = logging.getLogger("Lighthouse.Painting.IDA")
 
@@ -153,11 +153,11 @@ def _notify_status_changed(self, status):
         # enable / disable hook based on the painter being enabled or disabled
         if status:
             self._idp_hooks.hook()
-            if idaapi.init_hexrays_plugin():
+            if hexrays_available():
                 idaapi.install_hexrays_callback(self._hxe_callback)
         else:
             self._idp_hooks.unhook()
-            if idaapi.init_hexrays_plugin():
+            if hexrays_available():
                 idaapi.remove_hexrays_callback(self._hxe_callback)
 
         # send the status changed signal...
@@ -216,9 +216,35 @@ def _paint_nodes(self, node_addresses):
 
             # retrieve all the necessary structures to paint this node
             node_coverage = db_coverage.nodes.get(node_address, None)
-            node_metadata = db_metadata.nodes.get(node_address, None)
             functions = db_metadata.get_functions_by_node(node_address)
 
+            #
+            # due to the fact that multiple functions may 'share' a node,
+            # we need to go through and explicitly fetch the node metadata
+            # from each function when performing a paint.
+            #
+            # this is because each function will have a unique node_id in
+            # the target node_metadata(s)
+            #
+
+            node_metadatas = {}
+            for function in functions:
+
+                # attempt to safely fetch the node metadata from a function
+                node_metadata = function.nodes.get(node_address, None)
+
+                #
+                # this is possible if function is getting torn down. this is because
+                # we don't use locks. this just means it is time for us to bail as
+                # the metadata state is changing and the paint should be canceled
+                #
+
+                if not node_metadata:
+                    node_metadatas = []
+                    break
+
+                node_metadatas[function.address] = node_metadata
+
             #
             # if we did not get *everything* that we needed, then it is
             # possible the database changesd, or the coverage set changed...
@@ -227,30 +253,23 @@ def _paint_nodes(self, node_addresses):
             # okay, just stop painting here and let the painter sort it out
             #
 
-            if not (node_coverage and node_metadata and functions):
+            if not (node_coverage and node_metadatas):
                 self._msg_queue.put(self.MSG_ABORT)
                 node_addresses = node_addresses[:node_addresses.index(node_address)]
                 break
 
-            #
-            # get_functions_by_node() can return multiple functios (eg, a
-            # shared node) but in IDA should only ever return one... so we
-            # can pull it out now
-            #
-
-            function_metadata = functions[0]
-
             # ignore nodes that are only partially executed
             if node_coverage.instructions_executed != node_metadata.instruction_count:
                 continue
 
-            # do the *actual* painting of a single node instance
-            set_node_info(
-                function_metadata.address,
-                node_metadata.id,
-                node_info,
-                node_flags
-            )
+            # do the *actual* painting of a single node instance
+            for function_address, node_metadata in iteritems(node_metadatas):
+                set_node_info(
+                    function_address,
+                    node_metadata.id,
+                    node_info,
+                    node_flags
+                )
 
         self._painted_nodes |= set(node_addresses)
         self._action_complete.set()
@@ -271,32 +290,37 @@ def _clear_nodes(self, node_addresses):
         # loop through every node that we have metadata data for, clearing
         # their paint (color) in the IDA graph view as applicable.
         #
+        # read self._paint_nodes() comments for more info, the code below
+        # is very similar, sans the repetitive comments
+        #
 
         for node_address in node_addresses:
-
-            # retrieve all the necessary structures to paint this node
-            node_metadata = db_metadata.nodes.get(node_address, None)
             functions = db_metadata.get_functions_by_node(node_address)
 
-            #
-            # abort if something looks like it changed... read the comments in
-            # self._paint_nodes for more verbose information
-            #
+            node_metadatas = {}
+            for function in functions:
+                node_metadata = function.nodes.get(node_address, None)
+
+                if not node_metadata:
+                    node_metadatas = {}
+                    break
-
-            if not (node_metadata and functions):
+                node_metadatas[function.address] = node_metadata
+
+            # abort if something looks like it changed...
+            if not node_metadatas:
                 self._msg_queue.put(self.MSG_ABORT)
                 node_addresses = node_addresses[:node_addresses.index(node_address)]
                 break
 
-            function_metadata = functions[0]
-
             # do the *actual* painting of a single node instance
-            set_node_info(
-                function_metadata.address,
-                node_metadata.id,
-                node_info,
-                node_flags
-            )
+            for function_address, node_metadata in iteritems(node_metadatas):
+                set_node_info(
+                    function_address,
+                    node_metadata.id,
+                    node_info,
+                    node_flags
+                )
 
         self._painted_nodes -= set(node_addresses)
         self._action_complete.set()
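The `_paint_nodes()` / `_clear_nodes()` rework above boils down to one idea: a basic block shared by several functions has a different `node_id` in each function's metadata, so a paint must be applied once per owning function. A hedged, IDA-free sketch of that grouping step follows; the classes are illustrative stand-ins, not Lighthouse structures.

```python
class FunctionMetadata:
    """Stand-in: a function with its own per-function node metadata / ids."""
    def __init__(self, address, nodes):
        self.address = address
        self.nodes = nodes   # {node_address: node_id}

def collect_node_paints(node_address, functions):
    """
    Build {function_address: node_id} for every function sharing the node.
    Returns an empty dict if any function is missing the node (e.g. the
    metadata is being torn down), signalling the caller to abort the paint.
    """
    paints = {}
    for function in functions:
        node_id = function.nodes.get(node_address, None)
        if node_id is None:
            return {}    # bail, state is changing underneath us
        paints[function.address] = node_id
    return paints

# two functions share the basic block at 0x401020, under different node ids
funcs = [
    FunctionMetadata(0x401000, {0x401000: 0, 0x401020: 1}),
    FunctionMetadata(0x402000, {0x402000: 0, 0x401020: 3}),
]
print(collect_node_paints(0x401020, funcs))   # {4198400: 1, 4202496: 3}
```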
diff --git a/plugin/lighthouse/painting/painter.py b/plugins/lighthouse/painting/painter.py
similarity index 100%
rename from plugin/lighthouse/painting/painter.py
rename to plugins/lighthouse/painting/painter.py
diff --git a/plugin/lighthouse/reader/__init__.py b/plugins/lighthouse/reader/__init__.py
similarity index 100%
rename from plugin/lighthouse/reader/__init__.py
rename to plugins/lighthouse/reader/__init__.py
diff --git a/plugin/lighthouse/reader/coverage_file.py b/plugins/lighthouse/reader/coverage_file.py
similarity index 100%
rename from plugin/lighthouse/reader/coverage_file.py
rename to plugins/lighthouse/reader/coverage_file.py
diff --git a/plugin/lighthouse/reader/coverage_reader.py b/plugins/lighthouse/reader/coverage_reader.py
similarity index 100%
rename from plugin/lighthouse/reader/coverage_reader.py
rename to plugins/lighthouse/reader/coverage_reader.py
diff --git a/plugin/lighthouse/reader/parsers/__init__.py b/plugins/lighthouse/reader/parsers/__init__.py
similarity index 100%
rename from plugin/lighthouse/reader/parsers/__init__.py
rename to plugins/lighthouse/reader/parsers/__init__.py
diff --git a/plugin/lighthouse/reader/parsers/drcov.py b/plugins/lighthouse/reader/parsers/drcov.py
similarity index 100%
rename from plugin/lighthouse/reader/parsers/drcov.py
rename to plugins/lighthouse/reader/parsers/drcov.py
diff --git a/plugin/lighthouse/reader/parsers/modoff.py b/plugins/lighthouse/reader/parsers/modoff.py
similarity index 100%
rename from plugin/lighthouse/reader/parsers/modoff.py
rename to plugins/lighthouse/reader/parsers/modoff.py
diff --git a/plugin/lighthouse/reader/parsers/trace.py b/plugins/lighthouse/reader/parsers/trace.py
similarity index 100%
rename from plugin/lighthouse/reader/parsers/trace.py
rename to plugins/lighthouse/reader/parsers/trace.py
diff --git a/plugin/lighthouse/ui/__init__.py b/plugins/lighthouse/ui/__init__.py
similarity index 100%
rename from plugin/lighthouse/ui/__init__.py
rename to plugins/lighthouse/ui/__init__.py
diff --git a/plugin/lighthouse/ui/coverage_combobox.py b/plugins/lighthouse/ui/coverage_combobox.py
similarity index 100%
rename from plugin/lighthouse/ui/coverage_combobox.py
rename to plugins/lighthouse/ui/coverage_combobox.py
diff --git a/plugin/lighthouse/ui/coverage_overview.py b/plugins/lighthouse/ui/coverage_overview.py
similarity index 100%
rename from plugin/lighthouse/ui/coverage_overview.py
rename to plugins/lighthouse/ui/coverage_overview.py
diff --git a/plugin/lighthouse/ui/coverage_settings.py b/plugins/lighthouse/ui/coverage_settings.py
similarity index 100%
rename from plugin/lighthouse/ui/coverage_settings.py
rename to plugins/lighthouse/ui/coverage_settings.py
diff --git a/plugin/lighthouse/ui/coverage_table.py b/plugins/lighthouse/ui/coverage_table.py
similarity index 100%
rename from plugin/lighthouse/ui/coverage_table.py
rename to plugins/lighthouse/ui/coverage_table.py
diff --git a/plugin/lighthouse/ui/coverage_xref.py b/plugins/lighthouse/ui/coverage_xref.py
similarity index 100%
rename from plugin/lighthouse/ui/coverage_xref.py
rename to plugins/lighthouse/ui/coverage_xref.py
diff --git a/plugin/lighthouse/ui/module_selector.py b/plugins/lighthouse/ui/module_selector.py
similarity index 91%
rename from plugin/lighthouse/ui/module_selector.py
rename to plugins/lighthouse/ui/module_selector.py
index badb8b23..14038789 100644
--- a/plugin/lighthouse/ui/module_selector.py
+++ b/plugins/lighthouse/ui/module_selector.py
@@ -30,11 +30,18 @@ def __init__(self, target_name, module_names, coverage_file):
 
         # dialog attributes
         self.selected_name = None
-        self.remember_alias = False
 
         # configure the widget for use
         self._ui_init()
 
+    @property
+    def remember_alias(self):
+        return self._checkbox_remember.isChecked()
+
+    @property
+    def ignore_missing(self):
+        return self._checkbox_ignore_missing.isChecked()
+
     #--------------------------------------------------------------------------
     # Initialization - UI
     #--------------------------------------------------------------------------
@@ -82,6 +89,10 @@ def _ui_init_header(self):
         self._checkbox_remember = QtWidgets.QCheckBox("Remember target module alias for this session")
         self._checkbox_remember.setFont(self._font)
 
+        # a checkbox to ignore future 'missing coverage' / select module warnings
+        self._checkbox_ignore_missing = QtWidgets.QCheckBox("Suppress this dialog for the remaining coverage files")
+        self._checkbox_ignore_missing.setFont(self._font)
+
     def _ui_init_table(self):
         """
         Initialize the module selector table UI elements.
         """
@@ -134,6 +145,7 @@ def _ui_layout(self):
         layout.addWidget(self._label_description)
         layout.addWidget(self._table)
         layout.addWidget(self._checkbox_remember)
+        layout.addWidget(self._checkbox_ignore_missing)
 
         # scale widget dimensions based on DPI
         height = get_dpi_scale() * 250
@@ -153,5 +165,4 @@ def _ui_cell_double_click(self, row, column):
         A cell/row has been double clicked in the module table.
         """
         self.selected_name = self._table.item(row, 0).text()
-        self.remember_alias = self._checkbox_remember.isChecked()
-        self.accept()
+        self.accept()
\ No newline at end of file
diff --git a/plugin/lighthouse/ui/palette.py b/plugins/lighthouse/ui/palette.py
similarity index 100%
rename from plugin/lighthouse/ui/palette.py
rename to plugins/lighthouse/ui/palette.py
diff --git a/plugin/lighthouse/ui/resources/icons/batch.png b/plugins/lighthouse/ui/resources/icons/batch.png
similarity index 100%
rename from plugin/lighthouse/ui/resources/icons/batch.png
rename to plugins/lighthouse/ui/resources/icons/batch.png
diff --git a/plugin/lighthouse/ui/resources/icons/delete_coverage.png b/plugins/lighthouse/ui/resources/icons/delete_coverage.png
similarity index 100%
rename from plugin/lighthouse/ui/resources/icons/delete_coverage.png
rename to plugins/lighthouse/ui/resources/icons/delete_coverage.png
diff --git a/plugin/lighthouse/ui/resources/icons/load.png b/plugins/lighthouse/ui/resources/icons/load.png
similarity index 100%
rename from plugin/lighthouse/ui/resources/icons/load.png
rename to plugins/lighthouse/ui/resources/icons/load.png
diff --git a/plugin/lighthouse/ui/resources/icons/overview.png b/plugins/lighthouse/ui/resources/icons/overview.png
similarity index 100%
rename from plugin/lighthouse/ui/resources/icons/overview.png
rename to plugins/lighthouse/ui/resources/icons/overview.png
diff --git a/plugin/lighthouse/ui/resources/themes/dullien.json b/plugins/lighthouse/ui/resources/themes/dullien.json
similarity index 100%
rename from plugin/lighthouse/ui/resources/themes/dullien.json
rename to plugins/lighthouse/ui/resources/themes/dullien.json
diff --git a/plugin/lighthouse/ui/resources/themes/synth.json b/plugins/lighthouse/ui/resources/themes/synth.json
similarity index 100%
rename from plugin/lighthouse/ui/resources/themes/synth.json
rename to plugins/lighthouse/ui/resources/themes/synth.json
diff --git a/plugin/lighthouse/util/__init__.py b/plugins/lighthouse/util/__init__.py
similarity index 100%
rename from plugin/lighthouse/util/__init__.py
rename to plugins/lighthouse/util/__init__.py
diff --git a/plugin/lighthouse/util/debug.py b/plugins/lighthouse/util/debug.py
similarity index 100%
rename from plugin/lighthouse/util/debug.py
rename to plugins/lighthouse/util/debug.py
diff --git a/plugin/lighthouse/util/disassembler/__init__.py b/plugins/lighthouse/util/disassembler/__init__.py
similarity index 100%
rename from plugin/lighthouse/util/disassembler/__init__.py
rename to plugins/lighthouse/util/disassembler/__init__.py
diff --git a/plugin/lighthouse/util/disassembler/api.py b/plugins/lighthouse/util/disassembler/api.py
similarity index 100%
rename from plugin/lighthouse/util/disassembler/api.py
rename to plugins/lighthouse/util/disassembler/api.py
diff --git a/plugin/lighthouse/util/disassembler/binja_api.py b/plugins/lighthouse/util/disassembler/binja_api.py
similarity index 100%
rename from plugin/lighthouse/util/disassembler/binja_api.py
rename to plugins/lighthouse/util/disassembler/binja_api.py
diff --git a/plugin/lighthouse/util/disassembler/ida_api.py b/plugins/lighthouse/util/disassembler/ida_api.py
similarity index 98%
rename from plugin/lighthouse/util/disassembler/ida_api.py
rename to plugins/lighthouse/util/disassembler/ida_api.py
index bfa72c3f..46adb893 100644
--- a/plugin/lighthouse/util/disassembler/ida_api.py
+++ b/plugins/lighthouse/util/disassembler/ida_api.py
@@ -411,6 +411,16 @@ def name_changed(self, address, new_name):
 # HexRays Util
 #------------------------------------------------------------------------------
 
+def hexrays_available():
+    """
+    Return True if an IDA decompiler is loaded and available for use.
+    """
+    try:
+        import ida_hexrays
+        return ida_hexrays.init_hexrays_plugin()
+    except ImportError:
+        return False
+
 def map_line2citem(decompilation_text):
     """
     Map decompilation line numbers to citems.
diff --git a/plugin/lighthouse/util/log.py b/plugins/lighthouse/util/log.py
similarity index 96%
rename from plugin/lighthouse/util/log.py
rename to plugins/lighthouse/util/log.py
index f7674319..aa01eea1 100644
--- a/plugin/lighthouse/util/log.py
+++ b/plugins/lighthouse/util/log.py
@@ -110,12 +110,10 @@ def start_logging():
     # only enable logging if the LIGHTHOUSE_LOGGING environment variable is
     # present. we simply return a stub logger to sinkhole messages.
     #
-    # NOTE / v0.9.0: logging is enabled by default for now...
-    #
 
-    #if os.getenv("LIGHTHOUSE_LOGGING") == None:
-    #    logger.disabled = True
-    #    return logger
+    if os.getenv("LIGHTHOUSE_LOGGING") == None:
+        logger.disabled = True
+        return logger
 
     # create a directory for lighthouse logs if it does not exist
     log_dir = get_log_dir()
diff --git a/plugin/lighthouse/util/misc.py b/plugins/lighthouse/util/misc.py
similarity index 100%
rename from plugin/lighthouse/util/misc.py
rename to plugins/lighthouse/util/misc.py
diff --git a/plugin/lighthouse/util/python.py b/plugins/lighthouse/util/python.py
similarity index 100%
rename from plugin/lighthouse/util/python.py
rename to plugins/lighthouse/util/python.py
diff --git a/plugin/lighthouse/util/qt/__init__.py b/plugins/lighthouse/util/qt/__init__.py
similarity index 100%
rename from plugin/lighthouse/util/qt/__init__.py
rename to plugins/lighthouse/util/qt/__init__.py
diff --git a/plugin/lighthouse/util/qt/shim.py b/plugins/lighthouse/util/qt/shim.py
similarity index 100%
rename from plugin/lighthouse/util/qt/shim.py
rename to plugins/lighthouse/util/qt/shim.py
diff --git a/plugin/lighthouse/util/qt/util.py b/plugins/lighthouse/util/qt/util.py
similarity index 100%
rename from plugin/lighthouse/util/qt/util.py
rename to plugins/lighthouse/util/qt/util.py
diff --git a/plugin/lighthouse/util/qt/waitbox.py b/plugins/lighthouse/util/qt/waitbox.py
similarity index 100%
rename from plugin/lighthouse/util/qt/waitbox.py
rename to plugins/lighthouse/util/qt/waitbox.py
diff --git a/plugin/lighthouse/util/update.py b/plugins/lighthouse/util/update.py
similarity index 100%
rename from plugin/lighthouse/util/update.py
rename to plugins/lighthouse/util/update.py
diff --git a/plugin/lighthouse_plugin.py b/plugins/lighthouse_plugin.py
similarity index 100%
rename from plugin/lighthouse_plugin.py
rename to plugins/lighthouse_plugin.py
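Note that the `log.py` hunk above restores the opt-in behavior: logging stays disabled unless the `LIGHTHOUSE_LOGGING` environment variable is set before the plugin loads. A minimal, hedged sketch of that gate, and of flipping it on from Python, is shown below; the logger wiring is simplified compared to the real `start_logging()`.

```python
import logging
import os

def start_logging():
    """Return a logger that stays disabled unless LIGHTHOUSE_LOGGING is set."""
    logger = logging.getLogger("Lighthouse")
    if os.getenv("LIGHTHOUSE_LOGGING") is None:
        logger.disabled = True
        return logger
    logging.basicConfig(level=logging.DEBUG)
    return logger

# opt in (e.g. exported in the shell, or set before the disassembler starts)
os.environ["LIGHTHOUSE_LOGGING"] = "1"
logger = start_logging()
logger.debug("lighthouse logging enabled")
```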