From 6b249619c7e71887191bdc4fc1453bfcbcbe5ef6 Mon Sep 17 00:00:00 2001 From: Moshe Date: Tue, 13 Feb 2024 15:21:38 +0200 Subject: [PATCH 01/16] added fetch assets --- .../QualysEventCollector.py | 254 +++++++++--------- .../QualysEventCollector.yml | 54 +--- .../QualysEventCollector_test.py | 107 +------- 3 files changed, 131 insertions(+), 284 deletions(-) diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py index ab2b414e7821..47e3d32bea1d 100644 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py +++ b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py @@ -12,6 +12,8 @@ """ CONSTANTS """ DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' +ASSETS_DATE_FORMAT = '%Y-%m-%d' + API_SUFFIX = "/api/2.0/fo/" VENDOR = 'qualys' PRODUCT = 'qualys' @@ -27,6 +29,8 @@ HOST_DETECTIONS_NEXT_PAGE = 'host_detections_next_page' HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN = 'host_detections_since_datetime_prev_run' HOST_LAST_FETCH = 'host_last_fetch' +ASSETS_FETCH_FROM = '90 days' +MIN_ASSETS_INTERVAL = 59 """ CLIENT CLASS """ @@ -74,9 +78,9 @@ def get_user_activity_logs(self, since_datetime: str, max_fetch: int = 0, next_p return response.text - def get_host_list_detection(self, since_datetime: str, max_fetch: int = 0, next_page=None) -> Union[str, bytes]: + def get_host_list_detection(self, next_page=None) -> Union[str, bytes]: """ - Make a http request to Qualys API to get user activities logs + Make a http request to Qualys API to get assets Args: Returns: response from Qualys API @@ -84,11 +88,11 @@ def get_host_list_detection(self, since_datetime: str, max_fetch: int = 0, next_ DemistoException: can be raised by the _http_request function """ self._headers.update({"Content-Type": 'application/json'}) + since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) params: dict[str, Any] = { - "truncation_limit": max_fetch + "truncation_limit": 3, + "vm_scan_date_after": since_datetime } - if since_datetime: - params["vm_scan_date_after"] = since_datetime if next_page: params["id_min"] = next_page @@ -100,6 +104,29 @@ def get_host_list_detection(self, since_datetime: str, max_fetch: int = 0, next_ timeout=60, error_handler=self.error_handler, ) + return response + + def get_vulnerabilities(self) -> Union[str, bytes]: + """ + Make a http request to Qualys API to get vulnerabilities + Args: + Returns: + response from Qualys API + Raises: + DemistoException: can be raised by the _http_request function + """ + self._headers.update({"Content-Type": 'application/json'}) + since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) + params: dict[str, Any] = {"last_modified_after": since_datetime} + + response = self._http_request( + method='POST', + url_suffix=urljoin(API_SUFFIX, 'knowledge_base/vuln/?action=list'), + resp_type='text', + params=params, + timeout=60, + error_handler=self.error_handler, + ) return response @@ -117,6 +144,18 @@ def get_partial_response(response: str, start: str, end: str): return result +def skip_fetch_assets(last_run): + time_to_check = last_run.get("assets_last_fetch") + if not time_to_check: + return False + passed_time = (time.time() - time_to_check) / 60 + if passed_time < MIN_ASSETS_INTERVAL: + demisto.info(f"Skipping fetch-assets command. Only {passed_time} minutes have passed since the last fetch. 
" + f"It should be a minimum of 1 hour.") + return True + return False + + def csv2json(csv_data: str): """ Converts data from csv to json Args: @@ -172,6 +211,23 @@ def handle_host_list_detection_result(raw_response: requests.Response) -> tuple[ return response_requested_value, response_next_url +def handle_vulnerabilities_result(raw_response: requests.Response) -> tuple[Optional[list], Optional[str]]: + """ + Handles vulnerabilities response - parses xml to json and gets the list + Args: + raw_response (requests.Response): the raw result received from Qualys API command + Returns: + List with data generated for the result given + """ + formatted_response = parse_raw_response(raw_response) + + vulnerabilities = dict_safe_get(formatted_response, ['KNOWLEDGE_BASE_VULN_LIST_OUTPUT', 'RESPONSE', 'VULN_LIST', 'Vuln']) + if isinstance(vulnerabilities, dict): + vulnerabilities = [vulnerabilities] + + return vulnerabilities + + def parse_raw_response(response: Union[bytes, requests.Response]) -> dict: """ Parses raw response from Qualys. @@ -201,21 +257,6 @@ def get_simple_response_from_raw(raw_response: Any) -> Union[Any, dict]: return simple_response -def remove_events_before_last_scan(events, last_run): - try: - edited_events = [] - for event in events: - if first_found := event.get('DETECTION', {}).get('FIRST_FOUND_DATETIME'): - if datetime.strptime(first_found, DATE_FORMAT) < datetime.strptime(last_run, DATE_FORMAT): - demisto.debug( - f'Removed event with time: {first_found}, qid: {event.get("DETECTION", {}).get("ID")}') - else: - edited_events.append(event) - return edited_events - except Exception as e: - raise Exception(f'Failed to remove previous events. Error:{str(e)}') - - def remove_last_events(events, time_to_remove, time_field): """ Removes events with certain time. Args: @@ -335,46 +376,61 @@ def get_activity_logs_events(client, since_datetime, max_fetch, next_page=None) return activity_logs_events, next_run_dict -def get_host_list_detections_events(client, last_time, max_fetch, next_page=None) -> tuple[Optional[list], dict]: +def get_host_list_detections_events(client) -> list: """ Get host list detections from qualys - We are saving the next_page param and sending next request with next_page arg if needed. Saving the newest event fetched. - We are deleting the newest event each time to avoid duplications. 
Args: client: Qualys client - last_time: datetime to get events from - max_fetch: max number of events to return - next_page: pagination marking Returns: - Host list detections events, Next run datetime + Host list detections assets """ - demisto.debug(f'Starting to fetch host list events: last_time={last_time}, next_page={next_page}') + demisto.debug(f'Starting to fetch assets') + assets = [] + next_page = '' + while True: + host_list_detections = client.get_host_list_detection(next_page=next_page) + host_list_assets, next_url = handle_host_list_detection_result(host_list_detections) or [] + assets += host_list_assets + next_page = get_next_page_from_url(next_url, 'id_min') + if not next_page: + break - host_list_detections = client.get_host_list_detection(since_datetime=last_time, max_fetch=max_fetch, next_page=next_page) - host_list_events, next_url = handle_host_list_detection_result(host_list_detections) or [] - newest_event_time = host_list_events[0].get('LAST_VM_SCANNED_DATE') if host_list_events else last_time + edited_host_detections = get_detections_from_hosts(assets) + demisto.debug(f'Parsed detections from hosts, got {len(edited_host_detections)=} assets.') - new_next_page = get_next_page_from_url(next_url, 'id_min') + add_fields_to_events(edited_host_detections, ['DETECTION', 'FIRST_FOUND_DATETIME'], 'host_list_detection') - if newest_event_time == last_time: - edited_host_detections = [] - new_next_page = None - else: - edited_host_detections = get_detections_from_hosts(host_list_events) - demisto.debug(f'Parsed detections from hosts, got {len(edited_host_detections)=} events.') + return edited_host_detections - edited_host_detections = remove_events_before_last_scan(edited_host_detections, last_time) - add_fields_to_events(edited_host_detections, ['DETECTION', 'FIRST_FOUND_DATETIME'], 'host_list_detection') +def get_vulnerabilities(client) -> list: + """ Get vulnerabilities list from qualys + Args: + client: Qualys client + Returns: + list vulnerabilities + """ + demisto.debug(f'Starting to fetch vulnerabilities') + host_list_detections = client.get_vulnerabilities() + vulnerabilities = handle_vulnerabilities_result(host_list_detections) or [] - next_run_dict = { - HOST_LAST_FETCH: datetime.now().strftime(DATE_FORMAT) if not new_next_page else None, - HOST_DETECTIONS_NEWEST_EVENT_DATETIME: newest_event_time, - HOST_DETECTIONS_NEXT_PAGE: new_next_page, - HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN: last_time, - } - demisto.debug(f'Done to fetch host list events: {next_run_dict=}, sending {len(edited_host_detections)} events.') + demisto.debug(f'Parsed detections from hosts, got {len(vulnerabilities)=} assets.') + + return vulnerabilities + + +def fetch_assets(client): + """ Fetches host list detections + Args: + client: command client + Return: + event: events to push to xsiam + """ + demisto.debug(f'Starting fetch for assets') + assets = get_host_list_detections_events(client) + vulnerabilities = get_vulnerabilities(client) - return edited_host_detections, next_run_dict + demisto.info(f"Sending {len(assets)} assets, and {len(vulnerabilities)} vulnerabilities to XSIAM") + return assets, vulnerabilities def fetch_events(client, last_run, first_fetch_time, fetch_function, newest_event_field, next_page_field, @@ -452,36 +508,6 @@ def get_activity_logs_events_command(client, args, first_fetch_time): return limited_activity_logs_events, results -def get_host_list_detections_events_command(client, args, first_fetch_time): - """ - Args: - client: command client - args: Demisto args for 
this command: limit and since_datetime - first_fetch_time: first fetch time - Retuns: - Command results with host list detections - - """ - limit = arg_to_number(args.get('limit', 50)) - offset = arg_to_number(args.get('offset', 0)) - since_datetime = arg_to_datetime(args.get('vm_scan_date_after')) - last_run = since_datetime.strftime(DATE_FORMAT) if since_datetime else first_fetch_time - - host_list_detection_events, _ = get_host_list_detections_events( - client=client, - last_time=last_run, - max_fetch=0, - ) - limited_host_list_detection_events = host_list_detection_events[offset:limit + offset] # type: ignore[index,operator] - host_list_detection_hr = tableToMarkdown(name='Host List Detection', t=limited_host_list_detection_events) - results = CommandResults( - readable_output=host_list_detection_hr, - raw_response=limited_host_list_detection_events, - ) - - return limited_host_list_detection_events, results - - def test_module(client: Client, params: dict[str, Any], first_fetch_time: str) -> str: """ Tests API connectivity and authentication' @@ -519,26 +545,6 @@ def test_module(client: Client, params: dict[str, Any], first_fetch_time: str) - return 'ok' -def should_run_host_detections_fetch(last_run, host_detections_fetch_interval: timedelta, datetime_now: datetime): - """ - - Args: - last_run: last run object. - host_detections_fetch_interval: host detection fetch interval. - datetime_now: time now - - Returns: True if fetch host detections interval time has passed since last time that fetch run. - - """ - if last_fetch_time := last_run.get(HOST_LAST_FETCH): - last_check_time = datetime.strptime(last_fetch_time, DATE_FORMAT) - else: - # never run host detections fetch before - return True - demisto.debug(f'Should run host detections? {last_check_time=}, {host_detections_fetch_interval=}') - return datetime_now - last_check_time > host_detections_fetch_interval - - """ MAIN FUNCTION """ @@ -548,29 +554,23 @@ def main(): # pragma: no cover command = demisto.command() base_url = params.get('url') - verify_certificate = not params.get("insecure", False) + verify_certificate = not params.get("insecure", True) proxy = params.get("proxy", False) username = params.get("credentials").get("identifier") password = params.get("credentials").get("password") - max_fetch_activity_logs = arg_to_number(params.get("max_fetch_activity_logs", 0)) - max_fetch_hosts = arg_to_number(params.get("max_fetch_hosts_detections", 0)) # How much time before the first fetch to retrieve events first_fetch_datetime: datetime = arg_to_datetime( # type: ignore[assignment] arg=params.get('first_fetch', '3 days'), arg_name='First fetch time', required=True ) - - parsed_interval = dateparser.parse(params.get('host_detections_fetch_interval', '12 hours')) or dateparser.parse('12 hours') - host_detections_fetch_interval: timedelta = (datetime.now() - parsed_interval) # type: ignore[operator] first_fetch_str = first_fetch_datetime.strftime(DATE_FORMAT) demisto.info(f'Command being called is {command}') try: - headers: dict = {"X-Requested-With": "Cortex XSIAM"} - + headers: dict = {"X-Requested-With": "Cortex"} client = Client( base_url=base_url, username=username, @@ -585,37 +585,16 @@ def main(): # pragma: no cover result = test_module(client, params, first_fetch_str) return_results(result) - elif command == "qualys-get-activity-logs": + elif command == "qualys-get-events": should_push_events = argToBoolean(args.get('should_push_events', False)) events, results = get_activity_logs_events_command(client, args, 
first_fetch_str) return_results(results) if should_push_events: send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT) - elif command == "qualys-get-host-detections": - should_push_events = argToBoolean(args.get('should_push_events', False)) - events, results = get_host_list_detections_events_command(client, args, first_fetch_str) - return_results(results) - if should_push_events: - send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT) - elif command == 'fetch-events': last_run = demisto.getLastRun() - host_list_detection_events = [] - host_next_run = {} - if should_run_host_detections_fetch(last_run=last_run, - host_detections_fetch_interval=host_detections_fetch_interval, - datetime_now=datetime.now()): - host_next_run, host_list_detection_events = fetch_events( - client=client, - last_run=last_run, - newest_event_field=HOST_DETECTIONS_NEWEST_EVENT_DATETIME, - next_page_field=HOST_DETECTIONS_NEXT_PAGE, - previous_run_time_field=HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN, - fetch_function=get_host_list_detections_events, - first_fetch_time=first_fetch_str, - max_fetch=max_fetch_hosts, - ) + max_fetch_activity_logs = arg_to_number(params.get("max_fetch_activity_logs", 0)) logs_next_run, activity_logs_events = fetch_events( client=client, last_run=last_run, @@ -626,14 +605,21 @@ def main(): # pragma: no cover first_fetch_time=first_fetch_str, max_fetch=max_fetch_activity_logs, ) - send_events_to_xsiam(activity_logs_events + host_list_detection_events, vendor=VENDOR, product=PRODUCT) + send_events_to_xsiam(activity_logs_events, vendor=VENDOR, product=PRODUCT) # saves next_run for the time fetch-events is invoked - last_run.update(logs_next_run) - last_run.update(host_next_run) - demisto.setLastRun(last_run) + demisto.setLastRun(logs_next_run) + + elif command == 'fetch-assets': + assets_last_run = demisto.getAssetsLastRun() + demisto.debug(f'saved lastrun assets: {assets_last_run}') + if skip_fetch_assets(assets_last_run): + return + demisto.setAssetsLastRun({'assets_last_fetch': time.time()}) + assets, vulnerabilities = fetch_assets(client=client) + send_data_to_xsiam(data=assets, vendor=VENDOR, product='host_detections', data_type='assets') + send_data_to_xsiam(data=vulnerabilities, vendor=VENDOR, product='vulnerabilities') - # Log exceptions and return errors except Exception as e: return_error(f'Failed to execute {command} command.\nError:\n{str(e)}') diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.yml b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.yml index cae1dfe6e50d..f9820aa20c6e 100644 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.yml +++ b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.yml @@ -22,49 +22,29 @@ configuration: required: false type: 8 section: Connect + advanced: true - display: Use system proxy settings name: proxy required: false type: 8 section: Connect + advanced: true - defaultvalue: 3 days section: Collect - display: First fetch time + display: First event fetch time name: first_fetch - additionalinfo: If "First Fetch Time" is set for a long time ago, it may cause performance issues. - required: true - type: 0 -- display: Vulnerability Fetch Interval - additionalinfo: Time between fetches of vulnerabilities (for example 12 hours, 60 minutes, etc.). - name: host_detections_fetch_interval + additionalinfo: If "First event fetch time" is set for a long time ago, it may cause performance issues. 
required: true - section: Collect - defaultvalue: 12 hours type: 0 -- section: Collect - advanced: true - display: Activity Logs Fetch Interval - additionalinfo: Time between fetches of activity logs. - name: eventFetchInterval - defaultvalue: "1" - type: 19 - required: false - defaultvalue: 10000 section: Collect - display: Activity Logs Fetch Limit + display: Event Fetch Limit name: max_fetch_activity_logs - additionalinfo: Maximum number of activity logs to fetch per fetch iteration. - required: true - type: 0 -- defaultvalue: 1000 - section: Collect - display: Host Detections Fetch Limit - name: max_fetch_hosts_detections - additionalinfo: Maximum number of hosts to return in a single fetch iteration of host detections. Since each host may have multiple detections, it is likely that more events than the specified number will be fetched. + additionalinfo: Maximum number of events to fetch per fetch iteration. required: true type: 0 description: Qualys Event Collector fetches Activity Logs (Audit Logs) and Host Vulnerabilities. -display: Qualys Event Collector +display: Qualys VMDR name: QualysEventCollector script: commands: @@ -84,26 +64,10 @@ script: - description: Offset which events to return. name: offset description: Gets activity logs from Qualys. - name: qualys-get-activity-logs - - arguments: - - auto: PREDEFINED - defaultValue: 'false' - description: If true, the command will create events, otherwise it will only display them. - name: should_push_events - predefined: - - 'true' - - 'false' - required: true - - description: Maximum number of results to return. - name: limit - - description: Offset which events to return. - name: offset - - description: Date to return results from. - name: vm_scan_date_after - description: Gets host detections from Qualys. - name: qualys-get-host-detections + name: qualys-get-events dockerimage: demisto/python3:3.10.13.84405 isfetchevents: true + isfetchassets: true runonce: false script: '' subtype: python3 diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py index 833bb99912ef..8c0289465d24 100644 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py +++ b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py @@ -2,8 +2,8 @@ import pytest from CommonServerPython import * # noqa: F401 -from QualysEventCollector import get_activity_logs_events_command, get_host_list_detections_events_command, \ - Client, fetch_events, get_host_list_detections_events, get_activity_logs_events, should_run_host_detections_fetch +from QualysEventCollector import get_activity_logs_events_command, \ + Client, fetch_events, get_host_list_detections_events, get_activity_logs_events ACTIVITY_LOGS_NEWEST_EVENT_DATETIME = 'activity_logs_newest_event_datetime' ACTIVITY_LOGS_NEXT_PAGE = 'activity_logs_next_page' @@ -52,61 +52,6 @@ def test_get_activity_logs_events_command(requests_mock): assert len(activity_logs_events) == 17 -def test_get_host_list_detections_events_command(requests_mock): - """ - Given: - - host_list_detections_events_command - - When: - - Want to list all existing incidents - Then: - - Ensure List Host Detections Results in human-readable, and number of results reasonable. 
- """ - base_url = 'https://server_url/' - with open('./test_data/host_list_detections_raw.xml') as f: - logs = f.read() - requests_mock.get(f'{base_url}api/2.0/fo/asset/host/vm/detection/' - f'?action=list&truncation_limit=0&vm_scan_date_after=2023-03-01T00%3A00%3A00Z', text=logs) - client = Client(base_url=base_url, - verify=True, - headers={}, - proxy=False, - username='demisto', - password='demisto', - ) - args = {'limit': 50, 'vm_scan_date_after': '1 March 2023'} - first_fetch = '2022-03-21T03:42:05Z' - host_events, results = get_host_list_detections_events_command(client, args, first_fetch) - assert 'Host List Detection' in results.readable_output - assert len(host_events) == 8 - - -@pytest.mark.parametrize('last_run, fetch_interval_param, expected_should_run', [ - ('2023-05-24T11:55:35Z', '2023-05-24 00:00:00', False), - ('2023-05-23T11:55:35Z', '2023-05-24 00:00:00', True), - ({}, '2023-05-24 11:00:00', True), -]) -def test_should_run_host_detections_fetch(last_run, fetch_interval_param, expected_should_run): - """ - Given: - - should_run_host_detections_fetch command (fetches detections) - - When: - - Running fetch-events command and need to decide whether to fetch host detections - - Then: - - Ensure the expected result - """ - datetime_now = datetime.strptime('2023-05-24 12:00:00', '%Y-%m-%d %H:%M:%S') - delta = datetime.strptime(fetch_interval_param, '%Y-%m-%d %H:%M:%S') - fetch_interval = datetime_now - delta - last_run_dict = {'host_last_fetch': last_run} - should_run = should_run_host_detections_fetch(last_run=last_run_dict, - host_detections_fetch_interval=fetch_interval, - datetime_now=datetime_now) - assert should_run == expected_should_run - - @pytest.mark.parametrize('activity_log_last_run, logs_number, add_footer', [(None, 17, True), ("2023-05-24T09:55:35Z", 0, True), @@ -169,51 +114,3 @@ def test_fetch_logs_events_command(requests_mock, activity_log_last_run, logs_nu assert logs_next_run.get(ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN) == activity_log_last_run or first_fetch_str assert logs_next_run.get(ACTIVITY_LOGS_NEWEST_EVENT_DATETIME) == "2023-05-24T09:55:35Z" - -@freezegun.freeze_time('2023-05-16 16:00:00') -@pytest.mark.parametrize('host_last_run,detections_number', - [(None, 8), - ("2023-05-16T15:26:53Z", 4), - ("2023-05-14T15:04:55Z", 7)]) -def test_fetch_detection_events_command(requests_mock, host_last_run, detections_number): - """ - Given: - - fetch events command (fetches detections) - - When: - - Running fetch-events command - - Then: - - Ensure number of events fetched - """ - first_fetch_str = '2022-12-21T03:42:05Z' - base_url = 'https://server_url/' - truncation_limit = 10 - with open('./test_data/host_list_detections_raw.xml') as f: - hosts = f.read() - requests_mock.get(f'{base_url}api/2.0/fo/asset/host/vm/detection/' - f'?action=list&truncation_limit={truncation_limit}' - f'&vm_scan_date_after={host_last_run if host_last_run else first_fetch_str}', text=hosts) - client = Client( - base_url=base_url, - verify=True, - headers={}, - proxy=False, - username='demisto', - password='demisto', - ) - last_run = {HOST_DETECTIONS_NEWEST_EVENT_DATETIME: host_last_run} - host_next_run, host_list_detection_events = fetch_events( - client=client, - last_run=last_run, - newest_event_field=HOST_DETECTIONS_NEWEST_EVENT_DATETIME, - next_page_field=HOST_DETECTIONS_NEXT_PAGE, - previous_run_time_field=HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN, - fetch_function=get_host_list_detections_events, - first_fetch_time=first_fetch_str, - max_fetch=truncation_limit, - ) - - assert 
len(host_list_detection_events) == detections_number - assert host_next_run.get(HOST_DETECTIONS_NEWEST_EVENT_DATETIME) == '2023-05-16T15:26:01Z' - assert host_next_run.get(HOST_LAST_FETCH) == '2023-05-16T16:00:00Z' From 6ed2662d2a938824f3707c2ad45ec8efa034259f Mon Sep 17 00:00:00 2001 From: Moshe Date: Mon, 26 Feb 2024 11:29:31 +0200 Subject: [PATCH 02/16] added test --- .../QualysEventCollector.py | 14 +- .../QualysEventCollector_test.py | 34 ++++- .../test_data/vulnerabilities_raw.xml | 130 ++++++++++++++++++ 3 files changed, 166 insertions(+), 12 deletions(-) create mode 100644 Packs/qualys/Integrations/QualysEventCollector/test_data/vulnerabilities_raw.xml diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py index 47e3d32bea1d..62a3e9ab8315 100644 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py +++ b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py @@ -221,7 +221,7 @@ def handle_vulnerabilities_result(raw_response: requests.Response) -> tuple[Opti """ formatted_response = parse_raw_response(raw_response) - vulnerabilities = dict_safe_get(formatted_response, ['KNOWLEDGE_BASE_VULN_LIST_OUTPUT', 'RESPONSE', 'VULN_LIST', 'Vuln']) + vulnerabilities = dict_safe_get(formatted_response, ['KNOWLEDGE_BASE_VULN_LIST_OUTPUT', 'RESPONSE', 'VULN_LIST', 'VULN']) if isinstance(vulnerabilities, dict): vulnerabilities = [vulnerabilities] @@ -531,16 +531,8 @@ def test_module(client: Client, params: dict[str, Any], first_fetch_time: str) - next_page_field=ACTIVITY_LOGS_NEXT_PAGE, previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, ) - fetch_events( - client=client, - last_run={}, - first_fetch_time=first_fetch_time, - max_fetch=1, - fetch_function=get_host_list_detections_events, - newest_event_field=HOST_DETECTIONS_NEWEST_EVENT_DATETIME, - next_page_field=HOST_DETECTIONS_NEXT_PAGE, - previous_run_time_field=HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN, - ) + + fetch_assets(client=client) return 'ok' diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py index 8c0289465d24..9d39953002ac 100644 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py +++ b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py @@ -3,7 +3,7 @@ from CommonServerPython import * # noqa: F401 from QualysEventCollector import get_activity_logs_events_command, \ - Client, fetch_events, get_host_list_detections_events, get_activity_logs_events + Client, fetch_events, get_activity_logs_events, fetch_assets, ASSETS_FETCH_FROM, ASSETS_DATE_FORMAT ACTIVITY_LOGS_NEWEST_EVENT_DATETIME = 'activity_logs_newest_event_datetime' ACTIVITY_LOGS_NEXT_PAGE = 'activity_logs_next_page' @@ -114,3 +114,35 @@ def test_fetch_logs_events_command(requests_mock, activity_log_last_run, logs_nu assert logs_next_run.get(ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN) == activity_log_last_run or first_fetch_str assert logs_next_run.get(ACTIVITY_LOGS_NEWEST_EVENT_DATETIME) == "2023-05-24T09:55:35Z" + +def test_fetch_assets_command(requests_mock): + """ + Given: + - fetch_assets_command + When: + - Want to list all existing incidents + Then: + - Ensure List assets and vulnerabilities. 
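+        - Ensure the expected counts are parsed from the mocked XML responses (8 assets and 2 vulnerabilities, matching the asserts below).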
+ """ + base_url = 'https://server_url/' + with open('./test_data/host_list_detections_raw.xml') as f: + assets = f.read() + with open('./test_data/vulnerabilities_raw.xml') as f: + vulnerabilities = f.read() + requests_mock.get(f'{base_url}api/2.0/fo/asset/host/vm/detection/' + f'?action=list&truncation_limit=3&vm_scan_date_after={arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}', text=assets) + + requests_mock.post(f'{base_url}api/2.0/fo/knowledge_base/vuln/' + f'?action=list&last_modified_after={arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}', text=vulnerabilities) + + client = Client(base_url=base_url, + verify=True, + headers={}, + proxy=False, + username='demisto', + password='demisto', + ) + assets, vulnerabilities = fetch_assets(client=client) + + assert len(assets) == 8 + assert len(vulnerabilities) == 2 diff --git a/Packs/qualys/Integrations/QualysEventCollector/test_data/vulnerabilities_raw.xml b/Packs/qualys/Integrations/QualysEventCollector/test_data/vulnerabilities_raw.xml new file mode 100644 index 000000000000..7dd200b979de --- /dev/null +++ b/Packs/qualys/Integrations/QualysEventCollector/test_data/vulnerabilities_raw.xml @@ -0,0 +1,130 @@ + + + + + + 2024-02-12T15:21:40Z + + + 10052 + Vulnerability + 3 + <![CDATA[Vtecrm Vtenext Multiple Security Vulnerabilities]]> + CGI + 2023-12-19T12:02:30Z + 2021-01-21T12:51:22Z + 0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + + + + + + + + 1 + Exploit Available + + + + 10186 + Vulnerability + 2 + <![CDATA[]]> + CGI + 2024-02-10T12:03:39Z + 2000-11-10T11:00:00Z + + + + + + + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + + + + + 1 + Patch Available, Exploit Available + + + + + + + From 45d95eb70078b2a80ba235e56ad3e710dca4161e Mon Sep 17 00:00:00 2001 From: Moshe Date: Mon, 4 Mar 2024 11:59:55 +0200 Subject: [PATCH 03/16] fixed cr comments --- .../QualysEventCollector.py | 45 +++++++++++-------- 1 file changed, 27 insertions(+), 18 deletions(-) diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py index 62a3e9ab8315..56873fe515c9 100644 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py +++ b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py @@ -78,7 +78,7 @@ def get_user_activity_logs(self, since_datetime: str, max_fetch: int = 0, next_p return response.text - def get_host_list_detection(self, next_page=None) -> Union[str, bytes]: + def get_host_list_detection(self, since_datetime, next_page=None) -> Union[str, bytes]: """ Make a http request to Qualys API to get assets Args: @@ -88,7 +88,6 @@ def get_host_list_detection(self, next_page=None) -> Union[str, bytes]: DemistoException: can be raised by the _http_request function """ self._headers.update({"Content-Type": 'application/json'}) - since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) params: dict[str, Any] = { "truncation_limit": 3, "vm_scan_date_after": since_datetime @@ -106,7 +105,7 @@ def get_host_list_detection(self, next_page=None) -> Union[str, bytes]: ) return response - def get_vulnerabilities(self) -> Union[str, bytes]: + def get_vulnerabilities(self, since_datetime) -> Union[str, bytes]: """ Make a http request to Qualys API to get vulnerabilities Args: @@ -116,7 +115,6 @@ def get_vulnerabilities(self) -> Union[str, bytes]: DemistoException: can be raised by the _http_request function """ 
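+        # Note: since_datetime is now computed once by the caller (fetch_assets) and passed in,
+        # so the asset and vulnerability queries share the same ASSETS_FETCH_FROM window.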
self._headers.update({"Content-Type": 'application/json'}) - since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) params: dict[str, Any] = {"last_modified_after": since_datetime} response = self._http_request( @@ -145,12 +143,18 @@ def get_partial_response(response: str, start: str, end: str): def skip_fetch_assets(last_run): + """ Checks if enough time has passed since the previous run. + Args: + last_run: Last run time. + Returns: + Returns true or false if enough time has passed since the previous run. + """ time_to_check = last_run.get("assets_last_fetch") if not time_to_check: return False - passed_time = (time.time() - time_to_check) / 60 - if passed_time < MIN_ASSETS_INTERVAL: - demisto.info(f"Skipping fetch-assets command. Only {passed_time} minutes have passed since the last fetch. " + passed_minutes = (time.time() - time_to_check) / 60 + if passed_minutes < MIN_ASSETS_INTERVAL: + demisto.info(f"Skipping fetch-assets command. Only {passed_minutes} minutes have passed since the last fetch. " f"It should be a minimum of 1 hour.") return True return False @@ -376,18 +380,20 @@ def get_activity_logs_events(client, since_datetime, max_fetch, next_page=None) return activity_logs_events, next_run_dict -def get_host_list_detections_events(client) -> list: +def get_host_list_detections_events(client, since_datetime) -> list: """ Get host list detections from qualys Args: client: Qualys client + since_datetime: The start fetch date. Returns: Host list detections assets """ demisto.debug(f'Starting to fetch assets') assets = [] next_page = '' + while True: - host_list_detections = client.get_host_list_detection(next_page=next_page) + host_list_detections = client.get_host_list_detection(since_datetime, next_page=next_page) host_list_assets, next_url = handle_host_list_detection_result(host_list_detections) or [] assets += host_list_assets next_page = get_next_page_from_url(next_url, 'id_min') @@ -402,19 +408,19 @@ def get_host_list_detections_events(client) -> list: return edited_host_detections -def get_vulnerabilities(client) -> list: +def get_vulnerabilities(client, since_datetime) -> list: """ Get vulnerabilities list from qualys Args: client: Qualys client + since_datetime: The start fetch date. 
Returns: list vulnerabilities """ - demisto.debug(f'Starting to fetch vulnerabilities') - host_list_detections = client.get_vulnerabilities() + demisto.debug('Starting to fetch vulnerabilities') + host_list_detections = client.get_vulnerabilities(since_datetime) vulnerabilities = handle_vulnerabilities_result(host_list_detections) or [] - demisto.debug(f'Parsed detections from hosts, got {len(vulnerabilities)=} assets.') - + demisto.debug(f'Parsed detections from hosts, got {len(vulnerabilities)=} vulnerabilities.') return vulnerabilities @@ -426,10 +432,12 @@ def fetch_assets(client): event: events to push to xsiam """ demisto.debug(f'Starting fetch for assets') - assets = get_host_list_detections_events(client) - vulnerabilities = get_vulnerabilities(client) + since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) + + assets = get_host_list_detections_events(client, since_datetime) + vulnerabilities = get_vulnerabilities(client, since_datetime) - demisto.info(f"Sending {len(assets)} assets, and {len(vulnerabilities)} vulnerabilities to XSIAM") + demisto.info(f"Pulled {len(assets)} assets, and {len(vulnerabilities)} vulnerabilities from API, sending them to XSIAM") return assets, vulnerabilities @@ -607,10 +615,11 @@ def main(): # pragma: no cover demisto.debug(f'saved lastrun assets: {assets_last_run}') if skip_fetch_assets(assets_last_run): return - demisto.setAssetsLastRun({'assets_last_fetch': time.time()}) + execution_start_time = time.time() assets, vulnerabilities = fetch_assets(client=client) send_data_to_xsiam(data=assets, vendor=VENDOR, product='host_detections', data_type='assets') send_data_to_xsiam(data=vulnerabilities, vendor=VENDOR, product='vulnerabilities') + demisto.setAssetsLastRun({'assets_last_fetch': execution_start_time}) except Exception as e: return_error(f'Failed to execute {command} command.\nError:\n{str(e)}') From 8979aa63cb6f95e150c3a3e449bd2fbfec31ab28 Mon Sep 17 00:00:00 2001 From: Moshe Date: Wed, 6 Mar 2024 03:43:55 +0200 Subject: [PATCH 04/16] unify --- .../qualys/Integrations/Qualysv2/Qualysv2.py | 550 +++++++++++++++++- .../qualys/Integrations/Qualysv2/Qualysv2.yml | 45 +- .../Integrations/Qualysv2/Qualysv2_test.py | 147 ++++- .../Qualysv2/test_data/activity_logs.csv | 20 + .../test_data/host_list_detections_raw.xml | 307 ++++++++++ .../test_data/vulnerabilities_raw.xml | 130 +++++ 6 files changed, 1185 insertions(+), 14 deletions(-) create mode 100644 Packs/qualys/Integrations/Qualysv2/test_data/activity_logs.csv create mode 100644 Packs/qualys/Integrations/Qualysv2/test_data/host_list_detections_raw.xml create mode 100644 Packs/qualys/Integrations/Qualysv2/test_data/vulnerabilities_raw.xml diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py index b3e6c577efcb..ad5c5311c711 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py @@ -1,8 +1,11 @@ +import copy import demistomock as demisto # noqa: F401 from CommonServerPython import * # noqa: F401 from collections.abc import Callable - - +from typing import Any +import urllib3 +import csv +import io import requests from urllib3 import disable_warnings @@ -11,6 +14,24 @@ disable_warnings() # pylint: disable=no-member """ CONSTANTS """ +VENDOR = 'qualys' +PRODUCT = 'qualys' +BEGIN_RESPONSE_LOGS_CSV = "----BEGIN_RESPONSE_BODY_CSV" +END_RESPONSE_LOGS_CSV = "----END_RESPONSE_BODY_CSV" +BEGIN_RESPONSE_FOOTER_CSV = "----BEGIN_RESPONSE_FOOTER_CSV" +END_RESPONSE_FOOTER_CSV = 
"----END_RESPONSE_FOOTER_CSV" +WARNING = 'WARNING' +ACTIVITY_LOGS_NEWEST_EVENT_DATETIME = 'activity_logs_newest_event_datetime' +ACTIVITY_LOGS_NEXT_PAGE = 'activity_logs_next_page' +ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN = 'activity_logs_since_datetime_prev_run' +HOST_DETECTIONS_NEWEST_EVENT_DATETIME = 'host_detections_newest_event_datetime' +HOST_DETECTIONS_NEXT_PAGE = 'host_detections_next_page' +HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN = 'host_detections_since_datetime_prev_run' +HOST_LAST_FETCH = 'host_last_fetch' +ASSETS_FETCH_FROM = '90 days' +MIN_ASSETS_INTERVAL = 59 + +ASSETS_DATE_FORMAT = '%Y-%m-%d' DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ" # ISO8601 format with UTC, default in XSOAR API_SUFFIX = "/api/2.0/fo/" @@ -1610,6 +1631,85 @@ def command_http_request(self, command_api_data: dict[str, str]) -> Union[str, b error_handler=self.error_handler, ) + def get_user_activity_logs(self, since_datetime: str, max_fetch: int = 0, next_page=None) -> Union[str, bytes]: + """ + Make a http request to Qualys API to get user activities logs + Args: + Returns: + response from Qualys API + Raises: + DemistoException: can be raised by the _http_request function + """ + self._headers.update({"Content-Type": 'application/json'}) + params: dict[str, Any] = { + "truncation_limit": max_fetch + } + if since_datetime: + params["since_datetime"] = since_datetime + if next_page: + params["id_max"] = next_page + + response = self._http_request( + method='GET', + url_suffix=urljoin(API_SUFFIX, 'activity_log/?action=list'), + resp_type='text/csv', + params=params, + timeout=60, + error_handler=self.error_handler, + ) + + return response.text + + def get_host_list_detection(self, since_datetime, next_page=None) -> Union[str, bytes]: + """ + Make a http request to Qualys API to get assets + Args: + Returns: + response from Qualys API + Raises: + DemistoException: can be raised by the _http_request function + """ + self._headers.update({"Content-Type": 'application/json'}) + params: dict[str, Any] = { + "truncation_limit": 3, + "vm_scan_date_after": since_datetime + } + if next_page: + params["id_min"] = next_page + + response = self._http_request( + method='GET', + url_suffix=urljoin(API_SUFFIX, 'asset/host/vm/detection/?action=list'), + resp_type='text', + params=params, + timeout=60, + error_handler=self.error_handler, + ) + return response + + def get_vulnerabilities(self, since_datetime) -> Union[str, bytes]: + """ + Make a http request to Qualys API to get vulnerabilities + Args: + Returns: + response from Qualys API + Raises: + DemistoException: can be raised by the _http_request function + """ + self._headers.update({"Content-Type": 'application/json'}) + params: dict[str, Any] = {"last_modified_after": since_datetime} + + response = self._http_request( + method='POST', + url_suffix=urljoin(API_SUFFIX, 'knowledge_base/vuln/?action=list'), + resp_type='text', + params=params, + timeout=60, + error_handler=self.error_handler, + ) + + return response + """ HELPER FUNCTIONS """ @@ -2574,9 +2674,392 @@ def build_tag_asset_output(**kwargs) -> tuple[List[Any], str]: return handled_result, readable_output -""" COMMAND FUNCTIONS """ +def get_partial_response(response: str, start: str, end: str): + """ Cut response string from start to end tokens. 
+ """ + if start not in response or end not in response: + return None + start_index = response.index(start) + len(start) + end_index = response.index(end) + result = response[start_index:end_index].strip() + if result.startswith(WARNING): + result = result.replace(WARNING, '').strip() + return result + + +def skip_fetch_assets(last_run): + """ Checks if enough time has passed since the previous run. + Args: + last_run: Last run time. + Returns: + Returns true or false if enough time has passed since the previous run. + """ + time_to_check = last_run.get("assets_last_fetch") + if not time_to_check: + return False + passed_minutes = (time.time() - time_to_check) / 60 + if passed_minutes < MIN_ASSETS_INTERVAL: + demisto.info(f"Skipping fetch-assets command. Only {passed_minutes} minutes have passed since the last fetch. " + f"It should be a minimum of 1 hour.") + return True + return False + + +def csv2json(csv_data: str): + """ Converts data from csv to json + Args: + csv_data: data in csv format + Returns: + the same data in json formal + """ + reader = csv.DictReader(io.StringIO(csv_data)) + json_data = list(reader) + return json_data + + +def get_next_page_from_url(url, field): + """ + Get the next page field from url. + """ + match = re.search(rf"{field}=(\d+)", url) + res = match.group(1) if match else None + return res + + +def get_next_page_activity_logs(footer): + """ + Extracts the next token from activity logs response. + """ + if isinstance(footer, list): + footer = footer[0] + next_url = footer.get('URL', '') + max_id = get_next_page_from_url(next_url, 'id_max') + return max_id + + +def handle_host_list_detection_result(raw_response: requests.Response) -> tuple[Optional[list], Optional[str]]: + """ + Handles Host list detection response - parses xml to json and gets the list + Args: + raw_response (requests.Response): the raw result received from Qualys API command + Returns: + List with data generated for the result given + """ + formatted_response = parse_raw_response(raw_response) + simple_response = get_simple_response_from_raw(formatted_response) + if simple_response and simple_response.get("CODE"): + raise DemistoException(f"\n{simple_response.get('TEXT')} \nCode: {simple_response.get('CODE')}") + + response_requested_value = dict_safe_get(formatted_response, + ["HOST_LIST_VM_DETECTION_OUTPUT", "RESPONSE", "HOST_LIST", "HOST"]) + response_next_url = dict_safe_get(formatted_response, + ["HOST_LIST_VM_DETECTION_OUTPUT", "RESPONSE", "WARNING", "URL"], default_return_value='') + if isinstance(response_requested_value, dict): + response_requested_value = [response_requested_value] + + return response_requested_value, response_next_url + + +def handle_vulnerabilities_result(raw_response: requests.Response) -> tuple[Optional[list], Optional[str]]: + """ + Handles vulnerabilities response - parses xml to json and gets the list + Args: + raw_response (requests.Response): the raw result received from Qualys API command + Returns: + List with data generated for the result given + """ + formatted_response = parse_raw_response(raw_response) + + vulnerabilities = dict_safe_get(formatted_response, ['KNOWLEDGE_BASE_VULN_LIST_OUTPUT', 'RESPONSE', 'VULN_LIST', 'VULN']) + if isinstance(vulnerabilities, dict): + vulnerabilities = [vulnerabilities] + + return vulnerabilities + + +def remove_last_events(events, time_to_remove, time_field): + """ Removes events with certain time. 
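+    In the activity-log flow this is used to drop events that share the newest timestamp, so they are not ingested twice across consecutive fetches.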
+ Args: + events: list of events to remove the time from + time_to_remove: remove events with this time + time_field: the field name where the time is + """ + new_events = [] + for event in events: + if event.get(time_field) == time_to_remove: + demisto.debug(f'Removed activity log event with time: {time_to_remove}, log: {event}') + else: + new_events.append(event) + return new_events + + +def add_fields_to_events(events, time_field_path, event_type_field): + """ + Adds the _time key to the events. + Args: + events: List[Dict] - list of events to add the _time key to. + time_field_path: the list of fields to get _time from + event_type_field: type field in order to distinguish between the API's + Returns: + list: The events with the _time key. + """ + if events: + for event in events: + event['_time'] = dict_safe_get(event, time_field_path) + event['event_type'] = event_type_field + + +def get_detections_from_hosts(hosts): + """ + Parses detections from hosts. + Each host contains list of detections: + {'ID':1, + 'IP': '1.1.1.1', + 'LAST_VM_SCANNED_DATE': '01-01-2020', + 'DETECTION_LIST': {'DETECTION': [first_detection_data, second_detection, ...]} + 'additional_fields': ... + } + + The function parses the data in the following way: + {''ID':1, + 'IP': '1.1.1.1', + 'LAST_VM_SCANNED_DATE': '01-01-2020', + 'DETECTION': first_detection_data + 'additional_fields': ... + }, + {'ID':1, + 'IP': '1.1.1.1', + 'LAST_VM_SCANNED_DATE': '01-01-2020', + 'DETECTION': second_detection_data + 'additional_fields': ... + } + .... + + :param hosts: list of hosts that contains detections. + :return: parsed events. + """ + fetched_events = [] + for host in hosts: + if detections_list := host.get('DETECTION_LIST', {}).get('DETECTION'): + if isinstance(detections_list, list): + for detection in detections_list: + new_detection = copy.deepcopy(host) + del new_detection['DETECTION_LIST'] + new_detection['DETECTION'] = detection + fetched_events.append(new_detection) + elif isinstance(detections_list, dict): + new_detection = copy.deepcopy(host) + new_detection['DETECTION'] = detections_list + del new_detection['DETECTION_LIST'] + fetched_events.append(new_detection) + else: + del host['DETECTION_LIST'] + host['DETECTION'] = {} + fetched_events.append(host) + return fetched_events + + +def get_activity_logs_events(client, since_datetime, max_fetch, next_page=None) -> tuple[Optional[list], dict]: + """ Get logs activity from qualys + API response returns events sorted in descending order. We are saving the next_page param and + sending next request with next_page arg if needed. Saving the newest event fetched. + We are deleting the newest event each time to avoid duplication. 
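+    Pagination relies on the id_max marker parsed from the response footer URL; when present it is sent back to the API as the next_page argument.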
+ Args: + client: Qualys client + since_datetime: datetime to get events from + max_fetch: max number of events to return + next_page: pagination marking + Returns: + Logs activity events, Next run datetime + """ + demisto.debug(f'Starting to fetch activity logs events: since_datetime={since_datetime}, next_page={next_page}') + activity_logs = client.get_user_activity_logs(since_datetime=since_datetime, max_fetch=max_fetch, next_page=next_page) + activity_logs_events = csv2json(get_partial_response(activity_logs, BEGIN_RESPONSE_LOGS_CSV, + END_RESPONSE_LOGS_CSV) or activity_logs) or [] + footer_json = csv2json(get_partial_response(activity_logs, BEGIN_RESPONSE_FOOTER_CSV, + END_RESPONSE_FOOTER_CSV)) or {} + new_next_page = get_next_page_activity_logs(footer_json) + demisto.debug(f'Got activity logs events from server: {len(activity_logs_events)=}.') + + newest_event_time = activity_logs_events[0].get('Date') if activity_logs_events else since_datetime + + if not next_page: + activity_logs_events = remove_last_events(activity_logs_events, newest_event_time, 'Date') + add_fields_to_events(activity_logs_events, ['Date'], 'activity_log') + + next_run_dict = { + ACTIVITY_LOGS_NEWEST_EVENT_DATETIME: newest_event_time, + ACTIVITY_LOGS_NEXT_PAGE: new_next_page, + ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN: since_datetime, + } + demisto.debug(f'Done to fetch activity logs events: {next_run_dict=}, sending {len(activity_logs_events)} events.') + return activity_logs_events, next_run_dict + + +def get_host_list_detections_events(client, since_datetime) -> list: + """ Get host list detections from qualys + Args: + client: Qualys client + since_datetime: The start fetch date. + Returns: + Host list detections assets + """ + demisto.debug(f'Starting to fetch assets') + assets = [] + next_page = '' + + while True: + host_list_detections = client.get_host_list_detection(since_datetime, next_page=next_page) + host_list_assets, next_url = handle_host_list_detection_result(host_list_detections) or [] + assets += host_list_assets + next_page = get_next_page_from_url(next_url, 'id_min') + if not next_page: + break + + edited_host_detections = get_detections_from_hosts(assets) + demisto.debug(f'Parsed detections from hosts, got {len(edited_host_detections)=} assets.') + + add_fields_to_events(edited_host_detections, ['DETECTION', 'FIRST_FOUND_DATETIME'], 'host_list_detection') + + return edited_host_detections +def get_vulnerabilities(client, since_datetime) -> list: + """ Get vulnerabilities list from qualys + Args: + client: Qualys client + since_datetime: The start fetch date. 
+ Returns: + list vulnerabilities + """ + demisto.debug('Starting to fetch vulnerabilities') + host_list_detections = client.get_vulnerabilities(since_datetime) + vulnerabilities = handle_vulnerabilities_result(host_list_detections) or [] + + demisto.debug(f'Parsed detections from hosts, got {len(vulnerabilities)=} vulnerabilities.') + return vulnerabilities + + +def fetch_assets(client): + """ Fetches host list detections + Args: + client: command client + Return: + event: events to push to xsiam + """ + demisto.debug(f'Starting fetch for assets') + since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) + + assets = get_host_list_detections_events(client, since_datetime) + vulnerabilities = get_vulnerabilities(client, since_datetime) + + demisto.info(f"Pulled {len(assets)} assets, and {len(vulnerabilities)} vulnerabilities from API, sending them to XSIAM") + return assets, vulnerabilities + + +def fetch_events(client, last_run, first_fetch_time, fetch_function, newest_event_field, next_page_field, + previous_run_time_field, max_fetch: Optional[int] = 0): + """ Fetches activity logs and host list detections + Args: + client: command client + last_run: last fetch time + first_fetch_time: when start to fetch from + fetch_function: function that gets the events + max_fetch: max number of items to return (0 to return all) + newest_event_field + next_page_field + previous_run_time_field + Return: + next_last_run: where to fetch from next time + event: events to push to xsiam + """ + demisto.debug(f'Starting fetch for {fetch_function.__name__}, last run: {last_run}') + newest_event_time = last_run.get(newest_event_field) if last_run else None + next_page = last_run.get(next_page_field) + previous_time_field = last_run.get(previous_run_time_field) + + if not newest_event_time: + newest_event_time = first_fetch_time + + time_to_fetch = newest_event_time if not next_page else previous_time_field + + events, new_next_run = fetch_function(client, time_to_fetch, max_fetch, next_page) + + updated_next_run = {previous_run_time_field: time_to_fetch} + new_next_page = new_next_run.get(next_page_field) + + # if the fetch is not during the pagination (fetched without next_page) + if not next_page: + # update the newest event + updated_next_run[newest_event_field] = new_next_run.get(newest_event_field) + + # update if there is next page and this fetch is not over + updated_next_run[next_page_field] = new_next_page + + if last_fetch_time := new_next_run.get(HOST_LAST_FETCH): + updated_next_run[HOST_LAST_FETCH] = last_fetch_time + + demisto.info(f"Sending len{len(events)} to XSIAM. 
updated_next_run={updated_next_run}.") + return updated_next_run, events + + +def get_activity_logs_events_command(client, args, first_fetch_time): + """ + Args: + client: command client + args: Demisto args for this command: limit and since_datetime + first_fetch_time: first fetch time + Retuns: + Command results with activity logs + + """ + limit = arg_to_number(args.get('limit', 50)) + offset = arg_to_number(args.get('offset', 0)) + since_datetime = arg_to_datetime(args.get('since_datetime')) + since_datetime = since_datetime.strftime(DATE_FORMAT) if since_datetime else first_fetch_time + activity_logs_events, _ = get_activity_logs_events( + client=client, + since_datetime=since_datetime, + max_fetch=0, + ) + limited_activity_logs_events = activity_logs_events[offset:limit + offset] # type: ignore[index,operator] + activity_logs_hr = tableToMarkdown(name='Activity Logs', t=limited_activity_logs_events) + results = CommandResults( + readable_output=activity_logs_hr, + raw_response=limited_activity_logs_events, + ) + + return limited_activity_logs_events, results + + +# def test_module(client: Client, params: dict[str, Any], first_fetch_time: str) -> str: +# """ +# Tests API connectivity and authentication' +# When 'ok' is returned it indicates the integration works like it is supposed to and connection to the service is +# successful. +# Raises exceptions if something goes wrong. +# Args: +# client (Client): HelloWorld client to use. +# params (Dict): Integration parameters. +# first_fetch_time (int): The first fetch time as configured in the integration params. +# Returns: +# str: 'ok' if test passed, anything else will raise an exception and will fail the test. +# """ +# fetch_events( +# client=client, +# last_run={}, +# first_fetch_time=first_fetch_time, +# max_fetch=1, +# fetch_function=get_activity_logs_events, +# newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, +# next_page_field=ACTIVITY_LOGS_NEXT_PAGE, +# previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, +# ) +# +# fetch_assets(client=client) +# +# return 'ok' + @logger def test_module(client: Client) -> str: """ @@ -2659,8 +3142,10 @@ def qualys_command_flow_manager( def main(): # pragma: no cover params = demisto.params() + args = demisto.args() + command = demisto.command() - base_url = params["url"] + base_url = params.get('url') verify_certificate = not params.get("insecure", False) proxy = params.get("proxy", False) username = params.get("credentials").get("identifier") @@ -2907,27 +3392,68 @@ def main(): # pragma: no cover }, } - requested_command = demisto.command() - - demisto.debug(f"Command being called is {requested_command}") + demisto.debug(f"Command being called is {command}") try: - headers: dict = {"X-Requested-With": "Demisto"} - + headers: dict = {"X-Requested-With": "Cortex"} client = Client( base_url=base_url, username=username, password=password, verify=verify_certificate, headers=headers, proxy=proxy ) - if requested_command == "test-module": + first_fetch_datetime: datetime = arg_to_datetime( # type: ignore[assignment] + arg=params.get('first_fetch', '3 days'), + arg_name='First fetch time', + required=True + ) + first_fetch_str = first_fetch_datetime.strftime(DATE_FORMAT) + + if command == "test-module": text_res = test_module(client) return_results(text_res) + + elif command == "qualys-get-events": + should_push_events = argToBoolean(args.get('should_push_events', False)) + events, results = get_activity_logs_events_command(client, args, first_fetch_str) + return_results(results) + if 
should_push_events: + send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT) + + elif command == 'fetch-events': + last_run = demisto.getLastRun() + max_fetch_activity_logs = arg_to_number(params.get("max_fetch_activity_logs", 0)) + logs_next_run, activity_logs_events = fetch_events( + client=client, + last_run=last_run, + newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, + next_page_field=ACTIVITY_LOGS_NEXT_PAGE, + previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, + fetch_function=get_activity_logs_events, + first_fetch_time=first_fetch_str, + max_fetch=max_fetch_activity_logs, + ) + send_events_to_xsiam(activity_logs_events, vendor=VENDOR, product=PRODUCT) + + # saves next_run for the time fetch-events is invoked + demisto.setLastRun(logs_next_run) + + elif command == 'fetch-assets': + assets_last_run = demisto.getAssetsLastRun() + demisto.debug(f'saved lastrun assets: {assets_last_run}') + if skip_fetch_assets(assets_last_run): + return + execution_start_time = time.time() + assets, vulnerabilities = fetch_assets(client=client) + send_data_to_xsiam(data=assets, vendor=VENDOR, product='host_detections', data_type='assets') + send_data_to_xsiam(data=vulnerabilities, vendor=VENDOR, product='vulnerabilities') + demisto.setAssetsLastRun({'assets_last_fetch': execution_start_time}) + else: return_results( - qualys_command_flow_manager(client, demisto.args(), requested_command, commands_methods[requested_command]) + qualys_command_flow_manager(client, demisto.args(), command, commands_methods[command]) ) except Exception as e: demisto.error(traceback.format_exc()) # print the traceback - return_error(f"Failed to execute {requested_command} command.\nError:\n{str(e)}") + return_error(f"Failed to execute {command} command.\nError:\n{str(e)}") if __name__ in ("__main__", "__builtin__", "builtins"): diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml index 06d8f56edbe3..14968459796c 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml @@ -1,4 +1,7 @@ category: Vulnerability Management +sectionOrder: +- Connect +- Collect commonfields: id: QualysV2 version: -1 @@ -9,20 +12,40 @@ configuration: required: true type: 0 additionalinfo: "When using asset-tag commands, the official documentation recommends that the SERVER URL parameter should be in the following format: `https://qualysapi..apps.qualys.com/`. For more details see the integration documentation." + section: Connect - display: Username name: credentials required: true type: 9 + section: Connect - display: Trust any certificate (not secure) name: insecure type: 8 required: false + section: Connect + advanced: true - display: Use system proxy settings name: proxy type: 8 required: false + section: Connect + advanced: true +- defaultvalue: 3 days + section: Collect + display: First event fetch time + name: first_fetch + additionalinfo: If "First event fetch time" is set for a long time ago, it may cause performance issues. + required: true + type: 0 +- defaultvalue: 10000 + section: Collect + display: Event Fetch Limit + name: max_fetch_activity_logs + additionalinfo: Maximum number of events to fetch per fetch iteration. + required: true + type: 0 description: Qualys Vulnerability Management lets you create, run, fetch and manage reports, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance. 
-display: Qualys v2 +display: Qualys VMDR name: QualysV2 script: commands: @@ -2760,7 +2783,27 @@ script: - name: csv_data description: The CSV data file containing the vCenter - ESXi mapping records that you want to purge. required: true + - arguments: + - auto: PREDEFINED + defaultValue: 'false' + description: If true, the command will create events, otherwise it will only display them. + name: should_push_events + predefined: + - 'true' + - 'false' + required: true + - description: Maximum number of results to return. + name: limit + - description: Date to return results from. + name: since_datetime + - description: Offset which events to return. + name: offset + description: Gets activity logs from Qualys. + name: qualys-get-events dockerimage: demisto/python3:3.10.13.87159 + isfetchevents: true + isfetchassets: true + runonce: false script: '' subtype: python3 type: python diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py index bdd3fb2d835c..bf978b44eaa1 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py @@ -22,9 +22,154 @@ parse_raw_response, get_simple_response_from_raw, validate_required_group, + get_activity_logs_events_command, + fetch_events, get_activity_logs_events, fetch_assets, ASSETS_FETCH_FROM, ASSETS_DATE_FORMAT ) -from CommonServerPython import DemistoException +from CommonServerPython import * # noqa: F401 + + +ACTIVITY_LOGS_NEWEST_EVENT_DATETIME = 'activity_logs_newest_event_datetime' +ACTIVITY_LOGS_NEXT_PAGE = 'activity_logs_next_page' +ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN = 'activity_logs_since_datetime_prev_run' +HOST_DETECTIONS_NEWEST_EVENT_DATETIME = 'host_detections_newest_event_datetime' +HOST_DETECTIONS_NEXT_PAGE = 'host_detections_next_page' +HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN = 'host_detections_since_datetime_prev_run' +HOST_LAST_FETCH = 'host_last_fetch' +BEGIN_RESPONSE_LOGS_CSV = "----BEGIN_RESPONSE_BODY_CSV" +END_RESPONSE_LOGS_CSV = "----END_RESPONSE_BODY_CSV" +FOOTER = """----BEGIN_RESPONSE_FOOTER_CSV +WARNING +"CODE","TEXT","URL" +"1980","17 record limit exceeded. Use URL to get next batch of results.","https://server_url/api/2.0/fo/activity_log/ +?action=list&since_datetime=2022-12-21T03:42:05Z&truncation_limit=10&id_max=123456" +----END_RESPONSE_FOOTER_CSV""" + + +def test_get_activity_logs_events_command(requests_mock): + """ + Given: + - activity_logs_events_command + + When: + - Want to list all existing activity logs + + Then: + - Ensure Activity Logs Results in human-readable, and number of results reasonable. 
+ """ + base_url = 'https://server_url/' + with open('test_data/activity_logs.csv') as f: + logs = f.read() + requests_mock.get(f'{base_url}api/2.0/fo/activity_log/' + f'?action=list&truncation_limit=0&since_datetime=2023-03-01T00%3A00%3A00Z', text=logs) + client = Client(base_url=base_url, + verify=True, + headers={}, + proxy=False, + username='demisto', + password='demisto', + ) + args = {'limit': 50, 'since_datetime': '1 March 2023'} + first_fetch = '2022-03-21T03:42:05Z' + activity_logs_events, results = get_activity_logs_events_command(client, args, first_fetch) + assert 'Activity Logs' in results.readable_output + assert len(activity_logs_events) == 17 + + +@pytest.mark.parametrize('activity_log_last_run, logs_number, add_footer', + [(None, 17, True), + ("2023-05-24T09:55:35Z", 0, True), + ("2023-05-14T15:04:55Z", 7, True), + ("2023-01-01T08:06:44Z", 17, False)]) +def test_fetch_logs_events_command(requests_mock, activity_log_last_run, logs_number, add_footer): + """ + Given: + - fetch events command (fetches logs) + + When: + - Running fetch-events command + + Then: + - Ensure number of events fetched + - Ensure next page token saved + - Ensure previous run saved + - Ensure newest event time saved + """ + first_fetch_str = '2022-12-21T03:42:05Z' + base_url = 'https://server_url/' + truncation_limit = logs_number + with open('test_data/activity_logs.csv') as f: + logs = f.read() + new_logs = f'{BEGIN_RESPONSE_LOGS_CSV}' + for row in logs.split('\n'): + if activity_log_last_run and activity_log_last_run in row: + new_logs += f'{row}\n' + break + new_logs += f'{row}\n' + new_logs += f'{END_RESPONSE_LOGS_CSV}\n' + if add_footer: + new_logs += f'{FOOTER}\n' + + requests_mock.get(f'{base_url}api/2.0/fo/activity_log/' + f'?action=list&truncation_limit={truncation_limit}&' + f'since_datetime={activity_log_last_run if activity_log_last_run else first_fetch_str}', + text=new_logs) + client = Client(base_url=base_url, + verify=True, + headers={}, + proxy=False, + username='demisto', + password='demisto', + ) + last_run = {ACTIVITY_LOGS_NEWEST_EVENT_DATETIME: activity_log_last_run} + + logs_next_run, activity_logs_events = fetch_events( + client=client, + last_run=last_run, + newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, + next_page_field=ACTIVITY_LOGS_NEXT_PAGE, + previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, + fetch_function=get_activity_logs_events, + first_fetch_time=first_fetch_str, + max_fetch=truncation_limit, + ) + assert len(activity_logs_events) == logs_number + assert logs_next_run.get(ACTIVITY_LOGS_NEXT_PAGE) == ("123456" if add_footer else None) + assert logs_next_run.get(ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN) == activity_log_last_run or first_fetch_str + assert logs_next_run.get(ACTIVITY_LOGS_NEWEST_EVENT_DATETIME) == "2023-05-24T09:55:35Z" + + +def test_fetch_assets_command(requests_mock): + """ + Given: + - fetch_assets_command + When: + - Want to list all existing incidents + Then: + - Ensure List assets and vulnerabilities. 
+ """ + base_url = 'https://server_url/' + with open('./test_data/host_list_detections_raw.xml') as f: + assets = f.read() + with open('./test_data/vulnerabilities_raw.xml') as f: + vulnerabilities = f.read() + requests_mock.get(f'{base_url}api/2.0/fo/asset/host/vm/detection/' + f'?action=list&truncation_limit=3&vm_scan_date_after={arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}', text=assets) + + requests_mock.post(f'{base_url}api/2.0/fo/knowledge_base/vuln/' + f'?action=list&last_modified_after={arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}', text=vulnerabilities) + + client = Client(base_url=base_url, + verify=True, + headers={}, + proxy=False, + username='demisto', + password='demisto', + ) + assets, vulnerabilities = fetch_assets(client=client) + + assert len(assets) == 8 + assert len(vulnerabilities) == 2 class TestIsEmptyResult: diff --git a/Packs/qualys/Integrations/Qualysv2/test_data/activity_logs.csv b/Packs/qualys/Integrations/Qualysv2/test_data/activity_logs.csv new file mode 100644 index 000000000000..b8f861f7469d --- /dev/null +++ b/Packs/qualys/Integrations/Qualysv2/test_data/activity_logs.csv @@ -0,0 +1,20 @@ +"Date","Action","Module","Details","User Name","User Role","User IP" +"2023-05-24T09:55:35Z","request","auth","API: /api/2.0/fo/asset/host/vm/detection/index.php","demisto-user","Manager","1.2.2.2" +"2023-05-24T09:55:35Z","request","auth","API: /api/2.0/fo/asset/host/vm/detection/index.php","demisto-user","Manager","1.2.2.2" +"2023-05-17T08:06:55Z","finished","report","Report generation finished.(TITLE:, ID:14359440, FORMAT:PDF, Report Size:19.38 KB, Report Duration:12 seconds)","demisto-user","Manager","N/A" +"2023-05-17T08:05:44Z","update","account","Network Global Default Network IPs removed from Excluded IPs: 1.1.1.1","demisto-user","Manager","1.2.2.3" +"2023-05-16T14:49:07Z","options","scan","API scan (ref: scan/1684248547.49723) options: Standard TCP port list, Standard UDP port list, parallel ML scaling disabled for appliances, Load balancer detection OFF, Intrusive Checks: Excluded, ICMP Host Discovery, Overall Performance: Normal, Hosts to Scan in Parallel - External Scanners: 15, Hosts to Scan in Parallel - Scanner Appliances: 30, Total Processes to Run in Parallel: 10, HTTP Processes to Run in Parallel: 10, Packet (Burst) Delay: Medium, Intensity: Normal, appliances: External, target: 1.1.1.1","demisto-user","Manager","1.2.2.3" +"2023-05-16T14:48:41Z","update","option","API-IPs/Ranges updated: 1.1.1.4","demisto-user","Manager","1.2.2.3" +"2023-05-16T14:48:41Z","set","host_attribute","comment=[update] for 1.1.1.4","demisto-user","Manager","1.2.2.3" +"2023-05-14T16:13:53Z","update","addon","Updated subscription: 112155 for addon: [PC] set is_trial from 1 to 0","demisto-user","Manager","35.202.234.166" +"2023-05-14T15:04:55Z","set","admin","ML Custom Scan Header disabled, Total IPs purchased for Security Configuration Assessment set from '0' to '', Account End Date set from '01/18/2024 at 23:59:59 GMT' to '01/18/2024 at 23:59:59 GMT', Expiry Notification Enable, VM Agent Purchased set to: 10, PC Agent Purchased set to: 5, CM Asset Access set to: EXTERNAL for demisto-user by quays2el7","demisto-user","Manager","1.2.2.3" +"2023-03-21T02:46:36Z","add","appliance","Appliances {4567856} to Asset Group ID 87635234","demisto-user","Manager","1.1.1.5" +"2023-03-04T16:02:03Z","cancel","cm_scan","cm_scan (ref: compliance/16702578.45786) canceled by system, exceeded maximum queue time (30 days), target: 
1.1.1.1","demisto-user","Manager","N/A" +"2023-03-03T03:08:08Z","delete","schedule","scheduled task '1.2.2.3' deleted. Type: 'SCAN'","demisto-user","Manager","1.1.1.5" +"2023-03-03T02:48:49Z","add","vhost","Added Virtual Hosts '1.2.2.3:22:=www.example.com'","demisto-user","Manager","1.1.1.5" +"2023-03-03T02:46:36Z","update","asset_group","Update Asset Group {""requestId"":""08003610-d1f4-4bd6-818d-68c206acef9b"", ""assetGroupId"":""3035480"", ""name"":""XSOAR_LAB"", ""businessImpact"":""HIGH"", ""cvssDamagePotential"":null, ""cvssTargetDistribution"":null}","demisto-user","Manager","1.1.1.5" +"2023-03-02T03:15:41Z","set","host_attribute","owner=demst2dm for 1.2.2.3","demisto-user","Manager","1.1.1.5" +"2023-01-16T14:04:48Z","entity","account","Entity ReFresh {""requestId"":""12344567890"", ""entityName"":""Subscription"", ""operation"":""Update"", ""entityIds"":[""112155""]}","test2tk4","N/A","1.1.2.8" +"2023-01-07T08:13:56Z","create","asset_group","Create Asset Group {""requestId"":""12345678908b"", ""networkId"":null, ""ips"":""8.8.8.8"", ""name"":""Test-XSOAR-ASM"", ""netBios"":{""netBiosToAdd"":""junk""}, ""businessImpact"":""HIGH"", ""businessDivision"":""dev"", ""businessFunction"":""dev"", ""businessLocation"":""dev"", ""cvssDamagePotential"":null, ""cvssTargetDistribution"":null}","demisto-user","Manager","1.1.1.4" +"2023-01-03T03:19:44Z","set","host_attribute","comment=[Test] owner=demst2es Location=[santa clara] Function=[dev] Asset Tag=[dev] for 1.1.2.9","demisto-user","Manager","1.1.1.5" +"2023-01-01T08:06:44Z","log","report","Template settings:; Trend Duration:Last 30 days; Timeframe Selection Range: None; Group By:Hosts; Status:Passed, Failed and Error; Criticality:UNDEFINED, MINIMAL, MEDIUM, SERIOUS, CRITICAL, URGENT; Control Statistics:No; Host Statistics:Yes; Cloud Metadata:No; Report details:Yes; Show control rationale:Yes; Show hosts summary:Yes; Show control evidence:Yes; Show scan parameter:Yes; Show extended evidence:Yes; Show control exceptions:Yes; Show exception history:Yes; Show control glossary:No; Show Appendix:Yes; Frameworks Selected:All; title='Policy Report Template'; global='Yes'; Trend Duration= Last 30 days; Group By=Hosts; Status=Passed, Failed and Error; Criticality=UNDEFINED, MINIMAL, MEDIUM, SERIOUS, CRITICAL, URGENT; User access=; include_appendix = 1; include_control_comments = ; include_control_evidence = 1; include_control_exceptions = 1; include_control_ext_evidence = 1; include_control_mappings = ; include_control_rationale = 1; include_control_scan_parameter = 1; include_control_stats = ; include_control_summary = ; include_control_tech_summary = ; include_exceptions_history = 1; include_glossary = ; include_host_stats = 1; include_host_summary = 1; include_posture_status = 7; include_report_details = 1; include_report_summary = ; report_group_by = group_by_hosts; report_sort_by = ; trend_duration = 30; use_framework_filtering = 0; Owner = 237849","demisto-user","Manager","N/A" diff --git a/Packs/qualys/Integrations/Qualysv2/test_data/host_list_detections_raw.xml b/Packs/qualys/Integrations/Qualysv2/test_data/host_list_detections_raw.xml new file mode 100644 index 000000000000..33249987ec7e --- /dev/null +++ b/Packs/qualys/Integrations/Qualysv2/test_data/host_list_detections_raw.xml @@ -0,0 +1,307 @@ + + + + + + 2023-05-24T09:59:43Z + + + 143111841 + 1.1.1.1 + DNS + + + + + + + + 2023-05-16T15:26:53Z + 2023-05-16T15:26:01Z + 2130 + 2022-12-06T12:03:46Z + + + 11827 + Confirmed + 2 + 443 + tcp + 0 + + Active + 2023-05-16T15:26:53Z + 
2023-05-16T15:26:01Z + 216 + 2023-05-16T15:26:01Z + 2023-05-16T15:26:53Z + 2022-12-07T10:06:40Z + 0 + 0 + 2023-05-16T15:26:53Z + + + 15033 + Confirmed + 4 + 53 + udp + 0 + + Active + 2023-05-16T15:26:53Z + 2023-05-16T15:26:01Z + 217 + 2023-05-16T15:26:01Z + 2023-05-16T15:26:53Z + 0 + 0 + 2023-05-16T15:26:53Z + + + 15034 + Confirmed + 2 + 53 + udp + 0 + + Active + 2023-05-16T15:26:53Z + 2023-05-16T15:26:01Z + 217 + 2023-05-16T15:26:01Z + 2023-05-16T15:26:53Z + 0 + 0 + 2023-05-16T15:26:53Z + + + 15068 + Confirmed + 2 + 53 + udp + 0 + + Active + 2023-05-14T15:04:55Z + 2023-05-16T15:26:01Z + 216 + 2023-05-16T15:26:01Z + 2023-05-16T15:26:53Z + 2022-06-19T18:31:48Z + 0 + 0 + 2023-05-16T15:26:53Z + + + 38628 + Confirmed + 3 + 443 + tcp + 1 + + Active + 2023-05-14T15:04:55Z + 2023-05-16T15:26:01Z + 217 + 2023-05-16T15:26:01Z + 2023-05-16T15:26:53Z + 0 + 0 + 2023-05-16T15:26:53Z + + + 38794 + Confirmed + 3 + 443 + tcp + 1 + + Active + 2023-05-14T15:04:55Z + 2023-05-16T15:26:01Z + 213 + 2023-05-16T15:26:01Z + 2023-05-16T15:26:53Z + 0 + 0 + 2023-05-16T15:26:53Z + + + + + 10112200 + 1.2.2.2 + IP + + + + + + + 2023-03-21T03:43:11Z + 2023-03-21T03:42:05Z + 404 + + + 38726 + Potential + 3 + 0 + + New + 2023-05-16T15:26:53Z + 2023-03-21T03:42:05Z + 1 + 2023-03-21T03:42:05Z + 2023-03-21T03:43:11Z + 0 + 0 + 2023-03-21T03:43:11Z + + + 38739 + Confirmed + 3 + 22 + tcp + 0 + + New + 2023-03-21T03:42:05Z + 2023-03-21T03:42:05Z + 1 + 2023-03-21T03:42:05Z + 2023-03-21T03:43:11Z + 0 + 0 + 2023-03-21T03:43:11Z + + + + + + + + diff --git a/Packs/qualys/Integrations/Qualysv2/test_data/vulnerabilities_raw.xml b/Packs/qualys/Integrations/Qualysv2/test_data/vulnerabilities_raw.xml new file mode 100644 index 000000000000..7dd200b979de --- /dev/null +++ b/Packs/qualys/Integrations/Qualysv2/test_data/vulnerabilities_raw.xml @@ -0,0 +1,130 @@ + + + + + + 2024-02-12T15:21:40Z + + + 10052 + Vulnerability + 3 + <![CDATA[Vtecrm Vtenext Multiple Security Vulnerabilities]]> + CGI + 2023-12-19T12:02:30Z + 2021-01-21T12:51:22Z + 0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + + + + + + + + 1 + Exploit Available + + + + 10186 + Vulnerability + 2 + <![CDATA[]]> + CGI + 2024-02-10T12:03:39Z + 2000-11-10T11:00:00Z + + + + + + + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + + + + + 1 + Patch Available, Exploit Available + + + + + + + From 65f161e927098023e7652e54bb7d315c99cdb657 Mon Sep 17 00:00:00 2001 From: Moshe Date: Wed, 6 Mar 2024 10:21:19 +0200 Subject: [PATCH 05/16] removed the collector --- .../QualysEventCollector.py | 629 ------------------ .../QualysEventCollector.yml | 79 --- .../QualysEventCollector_description.md | 20 - .../QualysEventCollector_image.png | Bin 3555 -> 0 bytes .../QualysEventCollector_test.py | 148 ----- .../QualysEventCollector/README.md | 71 -- .../QualysEventCollector/command_examples | 2 - .../test_data/activity_logs.csv | 20 - .../test_data/host_list_detections_raw.xml | 307 --------- .../test_data/vulnerabilities_raw.xml | 130 ---- .../Qualysv2_dark.svg} | 0 .../Qualysv2_light.svg} | 0 Packs/qualys/Integrations/Qualysv2/README.md | 16 +- 13 files changed, 9 insertions(+), 1413 deletions(-) delete mode 100644 Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py delete mode 100644 Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.yml delete mode 100644 Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_description.md delete mode 100644 
Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_image.png delete mode 100644 Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py delete mode 100644 Packs/qualys/Integrations/QualysEventCollector/README.md delete mode 100644 Packs/qualys/Integrations/QualysEventCollector/command_examples delete mode 100644 Packs/qualys/Integrations/QualysEventCollector/test_data/activity_logs.csv delete mode 100644 Packs/qualys/Integrations/QualysEventCollector/test_data/host_list_detections_raw.xml delete mode 100644 Packs/qualys/Integrations/QualysEventCollector/test_data/vulnerabilities_raw.xml rename Packs/qualys/Integrations/{QualysEventCollector/QualysEventCollector_dark.svg => Qualysv2/Qualysv2_dark.svg} (100%) rename Packs/qualys/Integrations/{QualysEventCollector/QualysEventCollector_light.svg => Qualysv2/Qualysv2_light.svg} (100%) diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py deleted file mode 100644 index 56873fe515c9..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py +++ /dev/null @@ -1,629 +0,0 @@ -import copy -import demistomock as demisto # noqa: F401 -from CommonServerPython import * # noqa: F401 -from typing import Any -import urllib3 -import csv -import io - -# Disable insecure warnings -urllib3.disable_warnings() - -""" CONSTANTS """ - -DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' -ASSETS_DATE_FORMAT = '%Y-%m-%d' - -API_SUFFIX = "/api/2.0/fo/" -VENDOR = 'qualys' -PRODUCT = 'qualys' -BEGIN_RESPONSE_LOGS_CSV = "----BEGIN_RESPONSE_BODY_CSV" -END_RESPONSE_LOGS_CSV = "----END_RESPONSE_BODY_CSV" -BEGIN_RESPONSE_FOOTER_CSV = "----BEGIN_RESPONSE_FOOTER_CSV" -END_RESPONSE_FOOTER_CSV = "----END_RESPONSE_FOOTER_CSV" -WARNING = 'WARNING' -ACTIVITY_LOGS_NEWEST_EVENT_DATETIME = 'activity_logs_newest_event_datetime' -ACTIVITY_LOGS_NEXT_PAGE = 'activity_logs_next_page' -ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN = 'activity_logs_since_datetime_prev_run' -HOST_DETECTIONS_NEWEST_EVENT_DATETIME = 'host_detections_newest_event_datetime' -HOST_DETECTIONS_NEXT_PAGE = 'host_detections_next_page' -HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN = 'host_detections_since_datetime_prev_run' -HOST_LAST_FETCH = 'host_last_fetch' -ASSETS_FETCH_FROM = '90 days' -MIN_ASSETS_INTERVAL = 59 - -""" CLIENT CLASS """ - - -class Client(BaseClient): - def __init__(self, base_url, username, password, verify=True, proxy=False, headers=None): - super().__init__(base_url, verify=verify, proxy=proxy, headers=headers, auth=(username, password)) - - @staticmethod - def error_handler(res): - """ Handles error on API request to Qyalys """ - err_msg = f"Error in API call [{res.status_code}] - {res.reason}" - try: - simple_response = get_simple_response_from_raw(parse_raw_response(res.text)) - err_msg = f'{err_msg}\nError Code: {simple_response.get("CODE")}\nError Message: {simple_response.get("TEXT")}' - except Exception: - raise DemistoException(err_msg, res=res) - - def get_user_activity_logs(self, since_datetime: str, max_fetch: int = 0, next_page=None) -> Union[str, bytes]: - """ - Make a http request to Qualys API to get user activities logs - Args: - Returns: - response from Qualys API - Raises: - DemistoException: can be raised by the _http_request function - """ - self._headers.update({"Content-Type": 'application/json'}) - params: dict[str, Any] = { - "truncation_limit": max_fetch - } - if since_datetime: - params["since_datetime"] = since_datetime - if 
next_page: - params["id_max"] = next_page - - response = self._http_request( - method='GET', - url_suffix=urljoin(API_SUFFIX, 'activity_log/?action=list'), - resp_type='text/csv', - params=params, - timeout=60, - error_handler=self.error_handler, - ) - - return response.text - - def get_host_list_detection(self, since_datetime, next_page=None) -> Union[str, bytes]: - """ - Make a http request to Qualys API to get assets - Args: - Returns: - response from Qualys API - Raises: - DemistoException: can be raised by the _http_request function - """ - self._headers.update({"Content-Type": 'application/json'}) - params: dict[str, Any] = { - "truncation_limit": 3, - "vm_scan_date_after": since_datetime - } - if next_page: - params["id_min"] = next_page - - response = self._http_request( - method='GET', - url_suffix=urljoin(API_SUFFIX, 'asset/host/vm/detection/?action=list'), - resp_type='text', - params=params, - timeout=60, - error_handler=self.error_handler, - ) - return response - - def get_vulnerabilities(self, since_datetime) -> Union[str, bytes]: - """ - Make a http request to Qualys API to get vulnerabilities - Args: - Returns: - response from Qualys API - Raises: - DemistoException: can be raised by the _http_request function - """ - self._headers.update({"Content-Type": 'application/json'}) - params: dict[str, Any] = {"last_modified_after": since_datetime} - - response = self._http_request( - method='POST', - url_suffix=urljoin(API_SUFFIX, 'knowledge_base/vuln/?action=list'), - resp_type='text', - params=params, - timeout=60, - error_handler=self.error_handler, - ) - - return response - - -def get_partial_response(response: str, start: str, end: str): - """ Cut response string from start to end tokens. - """ - if start not in response or end not in response: - return None - start_index = response.index(start) + len(start) - end_index = response.index(end) - result = response[start_index:end_index].strip() - if result.startswith(WARNING): - result = result.replace(WARNING, '').strip() - return result - - -def skip_fetch_assets(last_run): - """ Checks if enough time has passed since the previous run. - Args: - last_run: Last run time. - Returns: - Returns true or false if enough time has passed since the previous run. - """ - time_to_check = last_run.get("assets_last_fetch") - if not time_to_check: - return False - passed_minutes = (time.time() - time_to_check) / 60 - if passed_minutes < MIN_ASSETS_INTERVAL: - demisto.info(f"Skipping fetch-assets command. Only {passed_minutes} minutes have passed since the last fetch. " - f"It should be a minimum of 1 hour.") - return True - return False - - -def csv2json(csv_data: str): - """ Converts data from csv to json - Args: - csv_data: data in csv format - Returns: - the same data in json formal - """ - reader = csv.DictReader(io.StringIO(csv_data)) - json_data = list(reader) - return json_data - - -def get_next_page_from_url(url, field): - """ - Get the next page field from url. - """ - match = re.search(rf"{field}=(\d+)", url) - res = match.group(1) if match else None - return res - - -def get_next_page_activity_logs(footer): - """ - Extracts the next token from activity logs response. 
- """ - if isinstance(footer, list): - footer = footer[0] - next_url = footer.get('URL', '') - max_id = get_next_page_from_url(next_url, 'id_max') - return max_id - - -def handle_host_list_detection_result(raw_response: requests.Response) -> tuple[Optional[list], Optional[str]]: - """ - Handles Host list detection response - parses xml to json and gets the list - Args: - raw_response (requests.Response): the raw result received from Qualys API command - Returns: - List with data generated for the result given - """ - formatted_response = parse_raw_response(raw_response) - simple_response = get_simple_response_from_raw(formatted_response) - if simple_response and simple_response.get("CODE"): - raise DemistoException(f"\n{simple_response.get('TEXT')} \nCode: {simple_response.get('CODE')}") - - response_requested_value = dict_safe_get(formatted_response, - ["HOST_LIST_VM_DETECTION_OUTPUT", "RESPONSE", "HOST_LIST", "HOST"]) - response_next_url = dict_safe_get(formatted_response, - ["HOST_LIST_VM_DETECTION_OUTPUT", "RESPONSE", "WARNING", "URL"], default_return_value='') - if isinstance(response_requested_value, dict): - response_requested_value = [response_requested_value] - - return response_requested_value, response_next_url - - -def handle_vulnerabilities_result(raw_response: requests.Response) -> tuple[Optional[list], Optional[str]]: - """ - Handles vulnerabilities response - parses xml to json and gets the list - Args: - raw_response (requests.Response): the raw result received from Qualys API command - Returns: - List with data generated for the result given - """ - formatted_response = parse_raw_response(raw_response) - - vulnerabilities = dict_safe_get(formatted_response, ['KNOWLEDGE_BASE_VULN_LIST_OUTPUT', 'RESPONSE', 'VULN_LIST', 'VULN']) - if isinstance(vulnerabilities, dict): - vulnerabilities = [vulnerabilities] - - return vulnerabilities - - -def parse_raw_response(response: Union[bytes, requests.Response]) -> dict: - """ - Parses raw response from Qualys. - Load xml as JSON. - Args: - response (Union[bytes, requests.Response]): Response from Qualys service. - - Returns: - (Dict): Dict representing the data returned by Qualys service. - """ - return json.loads(xml2json(response)) - - -def get_simple_response_from_raw(raw_response: Any) -> Union[Any, dict]: - """ - Gets the simple response from a given JSON dict structure returned by Qualys service - If object is not a dict, returns None. - Args: - raw_response (Any): Raw response from Qualys service. - - Returns: - (Union[Any, Dict]): Simple response path if object is a dict, else response as is. - """ - simple_response = None - if raw_response and isinstance(raw_response, dict): - simple_response = raw_response.get("SIMPLE_RETURN", {}).get("RESPONSE", {}) - return simple_response - - -def remove_last_events(events, time_to_remove, time_field): - """ Removes events with certain time. - Args: - events: list of events to remove the time from - time_to_remove: remove events with this time - time_field: the field name where the time is - """ - new_events = [] - for event in events: - if event.get(time_field) == time_to_remove: - demisto.debug(f'Removed activity log event with time: {time_to_remove}, log: {event}') - else: - new_events.append(event) - return new_events - - -def add_fields_to_events(events, time_field_path, event_type_field): - """ - Adds the _time key to the events. - Args: - events: List[Dict] - list of events to add the _time key to. 
- time_field_path: the list of fields to get _time from - event_type_field: type field in order to distinguish between the API's - Returns: - list: The events with the _time key. - """ - if events: - for event in events: - event['_time'] = dict_safe_get(event, time_field_path) - event['event_type'] = event_type_field - - -def get_detections_from_hosts(hosts): - """ - Parses detections from hosts. - Each host contains list of detections: - {'ID':1, - 'IP': '1.1.1.1', - 'LAST_VM_SCANNED_DATE': '01-01-2020', - 'DETECTION_LIST': {'DETECTION': [first_detection_data, second_detection, ...]} - 'additional_fields': ... - } - - The function parses the data in the following way: - {''ID':1, - 'IP': '1.1.1.1', - 'LAST_VM_SCANNED_DATE': '01-01-2020', - 'DETECTION': first_detection_data - 'additional_fields': ... - }, - {'ID':1, - 'IP': '1.1.1.1', - 'LAST_VM_SCANNED_DATE': '01-01-2020', - 'DETECTION': second_detection_data - 'additional_fields': ... - } - .... - - :param hosts: list of hosts that contains detections. - :return: parsed events. - """ - fetched_events = [] - for host in hosts: - if detections_list := host.get('DETECTION_LIST', {}).get('DETECTION'): - if isinstance(detections_list, list): - for detection in detections_list: - new_detection = copy.deepcopy(host) - del new_detection['DETECTION_LIST'] - new_detection['DETECTION'] = detection - fetched_events.append(new_detection) - elif isinstance(detections_list, dict): - new_detection = copy.deepcopy(host) - new_detection['DETECTION'] = detections_list - del new_detection['DETECTION_LIST'] - fetched_events.append(new_detection) - else: - del host['DETECTION_LIST'] - host['DETECTION'] = {} - fetched_events.append(host) - return fetched_events - - -def get_activity_logs_events(client, since_datetime, max_fetch, next_page=None) -> tuple[Optional[list], dict]: - """ Get logs activity from qualys - API response returns events sorted in descending order. We are saving the next_page param and - sending next request with next_page arg if needed. Saving the newest event fetched. - We are deleting the newest event each time to avoid duplication. 
- Args: - client: Qualys client - since_datetime: datetime to get events from - max_fetch: max number of events to return - next_page: pagination marking - Returns: - Logs activity events, Next run datetime - """ - demisto.debug(f'Starting to fetch activity logs events: since_datetime={since_datetime}, next_page={next_page}') - activity_logs = client.get_user_activity_logs(since_datetime=since_datetime, max_fetch=max_fetch, next_page=next_page) - activity_logs_events = csv2json(get_partial_response(activity_logs, BEGIN_RESPONSE_LOGS_CSV, - END_RESPONSE_LOGS_CSV) or activity_logs) or [] - footer_json = csv2json(get_partial_response(activity_logs, BEGIN_RESPONSE_FOOTER_CSV, - END_RESPONSE_FOOTER_CSV)) or {} - new_next_page = get_next_page_activity_logs(footer_json) - demisto.debug(f'Got activity logs events from server: {len(activity_logs_events)=}.') - - newest_event_time = activity_logs_events[0].get('Date') if activity_logs_events else since_datetime - - if not next_page: - activity_logs_events = remove_last_events(activity_logs_events, newest_event_time, 'Date') - add_fields_to_events(activity_logs_events, ['Date'], 'activity_log') - - next_run_dict = { - ACTIVITY_LOGS_NEWEST_EVENT_DATETIME: newest_event_time, - ACTIVITY_LOGS_NEXT_PAGE: new_next_page, - ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN: since_datetime, - } - demisto.debug(f'Done to fetch activity logs events: {next_run_dict=}, sending {len(activity_logs_events)} events.') - return activity_logs_events, next_run_dict - - -def get_host_list_detections_events(client, since_datetime) -> list: - """ Get host list detections from qualys - Args: - client: Qualys client - since_datetime: The start fetch date. - Returns: - Host list detections assets - """ - demisto.debug(f'Starting to fetch assets') - assets = [] - next_page = '' - - while True: - host_list_detections = client.get_host_list_detection(since_datetime, next_page=next_page) - host_list_assets, next_url = handle_host_list_detection_result(host_list_detections) or [] - assets += host_list_assets - next_page = get_next_page_from_url(next_url, 'id_min') - if not next_page: - break - - edited_host_detections = get_detections_from_hosts(assets) - demisto.debug(f'Parsed detections from hosts, got {len(edited_host_detections)=} assets.') - - add_fields_to_events(edited_host_detections, ['DETECTION', 'FIRST_FOUND_DATETIME'], 'host_list_detection') - - return edited_host_detections - - -def get_vulnerabilities(client, since_datetime) -> list: - """ Get vulnerabilities list from qualys - Args: - client: Qualys client - since_datetime: The start fetch date. 
- Returns: - list vulnerabilities - """ - demisto.debug('Starting to fetch vulnerabilities') - host_list_detections = client.get_vulnerabilities(since_datetime) - vulnerabilities = handle_vulnerabilities_result(host_list_detections) or [] - - demisto.debug(f'Parsed detections from hosts, got {len(vulnerabilities)=} vulnerabilities.') - return vulnerabilities - - -def fetch_assets(client): - """ Fetches host list detections - Args: - client: command client - Return: - event: events to push to xsiam - """ - demisto.debug(f'Starting fetch for assets') - since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) - - assets = get_host_list_detections_events(client, since_datetime) - vulnerabilities = get_vulnerabilities(client, since_datetime) - - demisto.info(f"Pulled {len(assets)} assets, and {len(vulnerabilities)} vulnerabilities from API, sending them to XSIAM") - return assets, vulnerabilities - - -def fetch_events(client, last_run, first_fetch_time, fetch_function, newest_event_field, next_page_field, - previous_run_time_field, max_fetch: Optional[int] = 0): - """ Fetches activity logs and host list detections - Args: - client: command client - last_run: last fetch time - first_fetch_time: when start to fetch from - fetch_function: function that gets the events - max_fetch: max number of items to return (0 to return all) - newest_event_field - next_page_field - previous_run_time_field - Return: - next_last_run: where to fetch from next time - event: events to push to xsiam - """ - demisto.debug(f'Starting fetch for {fetch_function.__name__}, last run: {last_run}') - newest_event_time = last_run.get(newest_event_field) if last_run else None - next_page = last_run.get(next_page_field) - previous_time_field = last_run.get(previous_run_time_field) - - if not newest_event_time: - newest_event_time = first_fetch_time - - time_to_fetch = newest_event_time if not next_page else previous_time_field - - events, new_next_run = fetch_function(client, time_to_fetch, max_fetch, next_page) - - updated_next_run = {previous_run_time_field: time_to_fetch} - new_next_page = new_next_run.get(next_page_field) - - # if the fetch is not during the pagination (fetched without next_page) - if not next_page: - # update the newest event - updated_next_run[newest_event_field] = new_next_run.get(newest_event_field) - - # update if there is next page and this fetch is not over - updated_next_run[next_page_field] = new_next_page - - if last_fetch_time := new_next_run.get(HOST_LAST_FETCH): - updated_next_run[HOST_LAST_FETCH] = last_fetch_time - - demisto.info(f"Sending len{len(events)} to XSIAM. 
updated_next_run={updated_next_run}.") - return updated_next_run, events - - -def get_activity_logs_events_command(client, args, first_fetch_time): - """ - Args: - client: command client - args: Demisto args for this command: limit and since_datetime - first_fetch_time: first fetch time - Retuns: - Command results with activity logs - - """ - limit = arg_to_number(args.get('limit', 50)) - offset = arg_to_number(args.get('offset', 0)) - since_datetime = arg_to_datetime(args.get('since_datetime')) - since_datetime = since_datetime.strftime(DATE_FORMAT) if since_datetime else first_fetch_time - activity_logs_events, _ = get_activity_logs_events( - client=client, - since_datetime=since_datetime, - max_fetch=0, - ) - limited_activity_logs_events = activity_logs_events[offset:limit + offset] # type: ignore[index,operator] - activity_logs_hr = tableToMarkdown(name='Activity Logs', t=limited_activity_logs_events) - results = CommandResults( - readable_output=activity_logs_hr, - raw_response=limited_activity_logs_events, - ) - - return limited_activity_logs_events, results - - -def test_module(client: Client, params: dict[str, Any], first_fetch_time: str) -> str: - """ - Tests API connectivity and authentication' - When 'ok' is returned it indicates the integration works like it is supposed to and connection to the service is - successful. - Raises exceptions if something goes wrong. - Args: - client (Client): HelloWorld client to use. - params (Dict): Integration parameters. - first_fetch_time (int): The first fetch time as configured in the integration params. - Returns: - str: 'ok' if test passed, anything else will raise an exception and will fail the test. - """ - fetch_events( - client=client, - last_run={}, - first_fetch_time=first_fetch_time, - max_fetch=1, - fetch_function=get_activity_logs_events, - newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, - next_page_field=ACTIVITY_LOGS_NEXT_PAGE, - previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, - ) - - fetch_assets(client=client) - - return 'ok' - - -""" MAIN FUNCTION """ - - -def main(): # pragma: no cover - params = demisto.params() - args = demisto.args() - command = demisto.command() - - base_url = params.get('url') - verify_certificate = not params.get("insecure", True) - proxy = params.get("proxy", False) - username = params.get("credentials").get("identifier") - password = params.get("credentials").get("password") - - # How much time before the first fetch to retrieve events - first_fetch_datetime: datetime = arg_to_datetime( # type: ignore[assignment] - arg=params.get('first_fetch', '3 days'), - arg_name='First fetch time', - required=True - ) - first_fetch_str = first_fetch_datetime.strftime(DATE_FORMAT) - - demisto.info(f'Command being called is {command}') - - try: - headers: dict = {"X-Requested-With": "Cortex"} - client = Client( - base_url=base_url, - username=username, - password=password, - verify=verify_certificate, - headers=headers, - proxy=proxy - ) - - if command == 'test-module': - # This is the call made when pressing the integration Test button. 
- result = test_module(client, params, first_fetch_str) - return_results(result) - - elif command == "qualys-get-events": - should_push_events = argToBoolean(args.get('should_push_events', False)) - events, results = get_activity_logs_events_command(client, args, first_fetch_str) - return_results(results) - if should_push_events: - send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT) - - elif command == 'fetch-events': - last_run = demisto.getLastRun() - max_fetch_activity_logs = arg_to_number(params.get("max_fetch_activity_logs", 0)) - logs_next_run, activity_logs_events = fetch_events( - client=client, - last_run=last_run, - newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, - next_page_field=ACTIVITY_LOGS_NEXT_PAGE, - previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, - fetch_function=get_activity_logs_events, - first_fetch_time=first_fetch_str, - max_fetch=max_fetch_activity_logs, - ) - send_events_to_xsiam(activity_logs_events, vendor=VENDOR, product=PRODUCT) - - # saves next_run for the time fetch-events is invoked - demisto.setLastRun(logs_next_run) - - elif command == 'fetch-assets': - assets_last_run = demisto.getAssetsLastRun() - demisto.debug(f'saved lastrun assets: {assets_last_run}') - if skip_fetch_assets(assets_last_run): - return - execution_start_time = time.time() - assets, vulnerabilities = fetch_assets(client=client) - send_data_to_xsiam(data=assets, vendor=VENDOR, product='host_detections', data_type='assets') - send_data_to_xsiam(data=vulnerabilities, vendor=VENDOR, product='vulnerabilities') - demisto.setAssetsLastRun({'assets_last_fetch': execution_start_time}) - - except Exception as e: - return_error(f'Failed to execute {command} command.\nError:\n{str(e)}') - - -if __name__ in ("__main__", "__builtin__", "builtins"): - main() diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.yml b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.yml deleted file mode 100644 index f9820aa20c6e..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.yml +++ /dev/null @@ -1,79 +0,0 @@ -category: Analytics & SIEM -sectionOrder: -- Connect -- Collect -commonfields: - id: QualysEventCollector - version: -1 -configuration: -- defaultvalue: https://qualysguard.qg2.apps.qualys.com - display: Server URL - name: url - required: true - type: 0 - section: Connect -- display: Username - name: credentials - required: true - type: 9 - section: Connect -- display: Trust any certificate (not secure) - name: insecure - required: false - type: 8 - section: Connect - advanced: true -- display: Use system proxy settings - name: proxy - required: false - type: 8 - section: Connect - advanced: true -- defaultvalue: 3 days - section: Collect - display: First event fetch time - name: first_fetch - additionalinfo: If "First event fetch time" is set for a long time ago, it may cause performance issues. - required: true - type: 0 -- defaultvalue: 10000 - section: Collect - display: Event Fetch Limit - name: max_fetch_activity_logs - additionalinfo: Maximum number of events to fetch per fetch iteration. - required: true - type: 0 -description: Qualys Event Collector fetches Activity Logs (Audit Logs) and Host Vulnerabilities. -display: Qualys VMDR -name: QualysEventCollector -script: - commands: - - arguments: - - auto: PREDEFINED - defaultValue: 'false' - description: If true, the command will create events, otherwise it will only display them. 
- name: should_push_events - predefined: - - 'true' - - 'false' - required: true - - description: Maximum number of results to return. - name: limit - - description: Date to return results from. - name: since_datetime - - description: Offset which events to return. - name: offset - description: Gets activity logs from Qualys. - name: qualys-get-events - dockerimage: demisto/python3:3.10.13.84405 - isfetchevents: true - isfetchassets: true - runonce: false - script: '' - subtype: python3 - type: python -marketplaces: -- marketplacev2 -fromversion: 8.2.0 -tests: -- No tests (auto formatted) diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_description.md b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_description.md deleted file mode 100644 index 772eab628d0c..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_description.md +++ /dev/null @@ -1,20 +0,0 @@ -## Qualys Vulnerability Management Help - -- You need a Qualys user account to use the Qualys integration. If a subscription has multiple users, all users with any user role (except Contact) can use the Qualys integration. Each user’s permissions correspond to their assigned user role. - -- Qualys Vulnerability Management uses basic authentication. You'll need your Qualys login credentials in order to use this integration. - -- You can get your server URL by identifying your platform in this link: https://www.qualys.com/platform-identification/ - -- Qualys user accounts that have been enabled with VIP two-factor authentication can be used with the Qualys API, however two-factor authentication will not be used when making API requests. Two-factor authentication is only supported when logging into the Qualys GUI. - -### Fetch Information - -- There are two event types that are fetched for the Event Collector: - * Activity logs. - * Hosts Vulnerability. -You can adjust the fetch interval using the *Activity Logs Fetch Interval* and *Vulnerability Fetch Interval* arguments. - -- **Note**: We recommend setting "First Fetch Time" to fetch logs from no more than the last 3 days for each fetch. Using a greater fetch time, may cause performance issues. - -- Vulnerabilities in the dataset have event_type = "host_list_detections". diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_image.png b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_image.png deleted file mode 100644 index 5aba8a836b37fa0642fa02a0200832f8741ad9d6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3555 zcmV<94IJ``P)u# z$|sS6MQ9a-5b#wDLLlVHQ-~-@z&tj|zOMh-+}-Rhcays@0Y1I|_f2N*nfIKTbIzRE z5b4mNLx&C>I&|pJp+kob9X}rk3K8-=k}B7yW=Jj?LoyAN1q}f;5n1tQ&Vb;+ca==? 
z36jN8iNt3vNzU1tT9S81HaSVhIfW($%NZG_Q?{Bh645kC&}f+`jzWbjSCb^RI$iQ# zyX-5?B^~Dktunac(v<5YgY<+f%bChbE}&;K8ZY$cXl(N!_|rZ1(#kqGP-?`OF!r zTQHC8TQ`&Ja5NV-7|4th9{#&oBwgIoOLkRMQ1!ick?ZTP1G}t=&o7CgA32wS0|%N8 z95`Sn_ny(gP}jkOQ*DS$zO|&aqt}X(?nLf1BU8^_i!j zcWX!!8XKwZ$vN!F@{%l*CZAOdZWOHxAW~9NUW4Oh5z^6!tq9AJz6)WD%jLQgi@6qQ z)){OptV~W$zK?#UP+e4+?JOVh9X2Y{o98S%K=v;_rBLA5pHGfGyU92-qp7{ienD1h z8*n1;M&x1GE`z~vM^RDHk$_@rARdR>?e_Ixb}vbimiO=9AH|A78~9HEstguQyWcIQ zMNA9~tRCJx+m3D27C1{wybP8uNkY%)+GCjE2vSb#4-wG0{HXDQ^0@~(b`*4MgIZY_ zTVM6-*Dn)Iv))U>$zFvxnS1Nit5>J+@Nl;nX=rFzbM)xZZ+zgii2vn6UnQvc_I;4qmQ>rOG`In^Gyb=6ZGwlva+(>D#J4H21S`Tl-Ywa z+XM3loTov~TBS5;MIpE`A_mNY>9ngl+2(jq(pf0}JE z=kepz7C684v2`Sgq)wfi`xBy3Xs~>-u&gV(VWL=n*Mq^B>j9W^7?H=K>w|?F#tEa* z$fEsk9H6Ir1=fRMbfuF2bY^Czca6aKL9Exopk)Y?K)MBinfehJ_bSrUxgNf+2{I(! z#8-0}Flu#RCZtR{=&vjJ5lln7Baz<8^FX^(khdZ(E{->briUU`1`C<$JZZVXe+>n$ zKy85(73KFWyPUrLX_DBO+;}CKi{1U1mO5bOCPZ$70Sh&j+K8S#dv?dE%AqDHV8>YA zLHr2fB?!lmo{BKEwRTYU6>2&_-N3xD0eSq*j4Q#oxA2fb`zIKih&UHvHfTloc4TB^ zgp&pYJHcKHid~E{3?*-cuHC6EFr@TrZol4&7vn0QT%kqWU|z!G4%XyHk~M z{+GuMag8^2>cceH6OW;<7a>71yLaz?MP6Q>rb8jA6Xj-41k|H*|32?$*r?GIDh$xM z#^EEp-;TX|{H$t0a;tJ5qLM|2M|?C;Xz(mX1hoN)iHVV52J2m3{AVCNKKIQBqbr~z zS-P~UW^!_J*zj1bv^AFP0Zv1GT+ap}l8s>I%S?lPPzxGM2UbsOaC^8Hv@=Q@nh?ZQ zNqc?2ZJ+P-rVRrIk+FXW2LZ-Z#2;ppury1Q#`SCc`y;uuwPut_Rl3W|@+G7Z*ccpn ztilp9Y3WOq z!3hS-26X7UrUQ&`HFc;S27W8`%mVXWt`Q`{CObi`56n3rQ zDAd5T#>U1Wp!4Zwak$ubuEC&gMd#<&VFX)zY5b<^`$!Nz<$j8Kei4aIom5(tC~DCX z3ZFK^kIc+vU$kFeB(gxR8djk?g!T^7S+|92D2x}$Hz;|wDk0J+O=y}H=K}P`}V6`$2`}XN5PQcyzpFy zH_`6hyA#16wstjn1{q)&>%FfK%oyWz_gVgra5emU;+6dN1M ze(irLO|St*Jcgx9mqu~E8iEeUWsn!jK1PG<)OS?#&@_^36@wQil;4D7-^xb(OE*)S zE4!%fx%qBCbmO{gavmt4`lWw&hXBYzcC8?dfLO9(@#j#}xMHvXe@46#Cv0F=Ru(%E z*o|p9dGchY#bU`u*(a2$+p$Q``A(0Ng#!K7goK23C_59nah}<1&Y@%iGE9}M4t{7|h=p2I37%@T&i?@CW5tYrRXkN+Gx1@yl=MWqR%B1fEK+W z)^T>g;Tvd4%!|`>x)@~S-R9N^8 zwE2Hx{{V2n&*%cCjH^jSO?t3b8=!?8!1m37) z(AUR-WpL+>h>VQ92W9wGEDG&!!@L(_Ugew*2495DlZ-Lfqu)J&^;>m!Ri>p*MX6WM zst6yodHn5Ux#u?|UDPFb17NJ{WI5Tkd_>m2yyzC+pt<0lye}cY%Z=1p}60F}{Ffp*%M?SM$B`TtP9SUclHNBezN};nUpiKUC1<&#XvGS%QK5UbO+Z zAkDWN2hwK}#<>a` zumJbmafc5dK1v+`%?v)Bo*w6NT34XPcuf_U;c!tp%SxNXGP%XlNAdBcH;Ej&;6&!R z2E_9a=uSgnVIg0YI&1@m^v`FNusr1m=_OQ}97;VNy^!T)o%fYKC@#R$^iziBi#a2{ ztgIS_(G+lqy+hgn#x?2;UB!WTc9&UO}TGTQ2<#1@^XBpv4sA&KZfbS@}37Qe4}46LXu zSWs*Q;&vzR7cMcQ^Q{wqr8D>Z<42ufUo`!@sTgIn7`Mg#zes1S4jnpl=+L1 **Configurations** > **Data Collection** > **Automation & Feed Integrations**. -2. Search for Qualys Event Collector. -3. Click **Add instance** to create and configure a new integration instance. - - | **Parameter** | **Description** | **Required** | - | --- | --- | --- | - | Server URL | | True | - | Username | | True | - | Password | | True | - | Trust any certificate (not secure) | | False | - | Use system proxy settings | | False | - | First fetch time | If "First Fetch Time" is set for a long time ago, it may cause performance issues. | True | - | Vulnerability Fetch Interval | Time between fetches of vulnerabilities \(for example 12 hours, 60 minutes, etc.\). | True | - | Activity Logs Fetch Interval | Time between fetches of activity logs. | False | - | Activity Logs Fetch Limit | Maximum number of activity logs to fetch per fetch iteration. | True | - | Host Detections Fetch Limit | Maximum number of hosts to return in a single fetch iteration. | True | - -4. Click **Test** to validate the URLs, token, and connection. - -## Commands - -You can execute these commands from the Cortex XSIAM CLI, as part of an automation, or in a playbook. 
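For example, a quick manual check of the two collector commands from the CLI could look like the following (illustrative invocations; the limit values are arbitrary):

!qualys-get-activity-logs limit=2 should_push_events=false
!qualys-get-host-detections limit=2 should_push_events=false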
-After you successfully execute a command, a DBot message appears in the War Room with the command details. - -### qualys-get-activity-logs - -*** -Gets activity logs from Qualys. - -#### Base Command - -`qualys-get-activity-logs` - -#### Input - -| **Argument Name** | **Description** | **Required** | -| --- | --- | --- | -| should_push_events | If true, the command will create events, otherwise it will only display them. Possible values are: true, false. Default is false. | Required | -| limit | Maximum results to return. | Optional | -| since_datetime | Date to return results from. | Optional | -| offset | Offset which events to return. | Optional | - -#### Context Output - -There is no context output for this command. -### qualys-get-host-detections - -*** -Gets host detections from Qualys. - -#### Base Command - -`qualys-get-host-detections` - -#### Input - -| **Argument Name** | **Description** | **Required** | -| --- | --- | --- | -| should_push_events | If true, the command will create events, otherwise it will only display them. Possible values are: true, false. Default is false. | Required | -| limit | Maximum number of results to return. | Optional | -| offset | Offset which events to return. | Optional | -| vm_scan_date_after | Date to return results from. | Optional | - -#### Context Output - -There is no context output for this command. diff --git a/Packs/qualys/Integrations/QualysEventCollector/command_examples b/Packs/qualys/Integrations/QualysEventCollector/command_examples deleted file mode 100644 index ff11963bcedb..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/command_examples +++ /dev/null @@ -1,2 +0,0 @@ -!qualys-get-host-detections limit=2 -!qualys-get-activity-logs limit=2 \ No newline at end of file diff --git a/Packs/qualys/Integrations/QualysEventCollector/test_data/activity_logs.csv b/Packs/qualys/Integrations/QualysEventCollector/test_data/activity_logs.csv deleted file mode 100644 index b8f861f7469d..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/test_data/activity_logs.csv +++ /dev/null @@ -1,20 +0,0 @@ -"Date","Action","Module","Details","User Name","User Role","User IP" -"2023-05-24T09:55:35Z","request","auth","API: /api/2.0/fo/asset/host/vm/detection/index.php","demisto-user","Manager","1.2.2.2" -"2023-05-24T09:55:35Z","request","auth","API: /api/2.0/fo/asset/host/vm/detection/index.php","demisto-user","Manager","1.2.2.2" -"2023-05-17T08:06:55Z","finished","report","Report generation finished.(TITLE:, ID:14359440, FORMAT:PDF, Report Size:19.38 KB, Report Duration:12 seconds)","demisto-user","Manager","N/A" -"2023-05-17T08:05:44Z","update","account","Network Global Default Network IPs removed from Excluded IPs: 1.1.1.1","demisto-user","Manager","1.2.2.3" -"2023-05-16T14:49:07Z","options","scan","API scan (ref: scan/1684248547.49723) options: Standard TCP port list, Standard UDP port list, parallel ML scaling disabled for appliances, Load balancer detection OFF, Intrusive Checks: Excluded, ICMP Host Discovery, Overall Performance: Normal, Hosts to Scan in Parallel - External Scanners: 15, Hosts to Scan in Parallel - Scanner Appliances: 30, Total Processes to Run in Parallel: 10, HTTP Processes to Run in Parallel: 10, Packet (Burst) Delay: Medium, Intensity: Normal, appliances: External, target: 1.1.1.1","demisto-user","Manager","1.2.2.3" -"2023-05-16T14:48:41Z","update","option","API-IPs/Ranges updated: 1.1.1.4","demisto-user","Manager","1.2.2.3" -"2023-05-16T14:48:41Z","set","host_attribute","comment=[update] for 
1.1.1.4","demisto-user","Manager","1.2.2.3" -"2023-05-14T16:13:53Z","update","addon","Updated subscription: 112155 for addon: [PC] set is_trial from 1 to 0","demisto-user","Manager","35.202.234.166" -"2023-05-14T15:04:55Z","set","admin","ML Custom Scan Header disabled, Total IPs purchased for Security Configuration Assessment set from '0' to '', Account End Date set from '01/18/2024 at 23:59:59 GMT' to '01/18/2024 at 23:59:59 GMT', Expiry Notification Enable, VM Agent Purchased set to: 10, PC Agent Purchased set to: 5, CM Asset Access set to: EXTERNAL for demisto-user by quays2el7","demisto-user","Manager","1.2.2.3" -"2023-03-21T02:46:36Z","add","appliance","Appliances {4567856} to Asset Group ID 87635234","demisto-user","Manager","1.1.1.5" -"2023-03-04T16:02:03Z","cancel","cm_scan","cm_scan (ref: compliance/16702578.45786) canceled by system, exceeded maximum queue time (30 days), target: 1.1.1.1","demisto-user","Manager","N/A" -"2023-03-03T03:08:08Z","delete","schedule","scheduled task '1.2.2.3' deleted. Type: 'SCAN'","demisto-user","Manager","1.1.1.5" -"2023-03-03T02:48:49Z","add","vhost","Added Virtual Hosts '1.2.2.3:22:=www.example.com'","demisto-user","Manager","1.1.1.5" -"2023-03-03T02:46:36Z","update","asset_group","Update Asset Group {""requestId"":""08003610-d1f4-4bd6-818d-68c206acef9b"", ""assetGroupId"":""3035480"", ""name"":""XSOAR_LAB"", ""businessImpact"":""HIGH"", ""cvssDamagePotential"":null, ""cvssTargetDistribution"":null}","demisto-user","Manager","1.1.1.5" -"2023-03-02T03:15:41Z","set","host_attribute","owner=demst2dm for 1.2.2.3","demisto-user","Manager","1.1.1.5" -"2023-01-16T14:04:48Z","entity","account","Entity ReFresh {""requestId"":""12344567890"", ""entityName"":""Subscription"", ""operation"":""Update"", ""entityIds"":[""112155""]}","test2tk4","N/A","1.1.2.8" -"2023-01-07T08:13:56Z","create","asset_group","Create Asset Group {""requestId"":""12345678908b"", ""networkId"":null, ""ips"":""8.8.8.8"", ""name"":""Test-XSOAR-ASM"", ""netBios"":{""netBiosToAdd"":""junk""}, ""businessImpact"":""HIGH"", ""businessDivision"":""dev"", ""businessFunction"":""dev"", ""businessLocation"":""dev"", ""cvssDamagePotential"":null, ""cvssTargetDistribution"":null}","demisto-user","Manager","1.1.1.4" -"2023-01-03T03:19:44Z","set","host_attribute","comment=[Test] owner=demst2es Location=[santa clara] Function=[dev] Asset Tag=[dev] for 1.1.2.9","demisto-user","Manager","1.1.1.5" -"2023-01-01T08:06:44Z","log","report","Template settings:; Trend Duration:Last 30 days; Timeframe Selection Range: None; Group By:Hosts; Status:Passed, Failed and Error; Criticality:UNDEFINED, MINIMAL, MEDIUM, SERIOUS, CRITICAL, URGENT; Control Statistics:No; Host Statistics:Yes; Cloud Metadata:No; Report details:Yes; Show control rationale:Yes; Show hosts summary:Yes; Show control evidence:Yes; Show scan parameter:Yes; Show extended evidence:Yes; Show control exceptions:Yes; Show exception history:Yes; Show control glossary:No; Show Appendix:Yes; Frameworks Selected:All; title='Policy Report Template'; global='Yes'; Trend Duration= Last 30 days; Group By=Hosts; Status=Passed, Failed and Error; Criticality=UNDEFINED, MINIMAL, MEDIUM, SERIOUS, CRITICAL, URGENT; User access=; include_appendix = 1; include_control_comments = ; include_control_evidence = 1; include_control_exceptions = 1; include_control_ext_evidence = 1; include_control_mappings = ; include_control_rationale = 1; include_control_scan_parameter = 1; include_control_stats = ; include_control_summary = ; include_control_tech_summary = ; 
include_exceptions_history = 1; include_glossary = ; include_host_stats = 1; include_host_summary = 1; include_posture_status = 7; include_report_details = 1; include_report_summary = ; report_group_by = group_by_hosts; report_sort_by = ; trend_duration = 30; use_framework_filtering = 0; Owner = 237849","demisto-user","Manager","N/A" diff --git a/Packs/qualys/Integrations/QualysEventCollector/test_data/host_list_detections_raw.xml b/Packs/qualys/Integrations/QualysEventCollector/test_data/host_list_detections_raw.xml deleted file mode 100644 index 33249987ec7e..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/test_data/host_list_detections_raw.xml +++ /dev/null @@ -1,307 +0,0 @@ - - - - - - 2023-05-24T09:59:43Z - - - 143111841 - 1.1.1.1 - DNS - - - - - - - - 2023-05-16T15:26:53Z - 2023-05-16T15:26:01Z - 2130 - 2022-12-06T12:03:46Z - - - 11827 - Confirmed - 2 - 443 - tcp - 0 - - Active - 2023-05-16T15:26:53Z - 2023-05-16T15:26:01Z - 216 - 2023-05-16T15:26:01Z - 2023-05-16T15:26:53Z - 2022-12-07T10:06:40Z - 0 - 0 - 2023-05-16T15:26:53Z - - - 15033 - Confirmed - 4 - 53 - udp - 0 - - Active - 2023-05-16T15:26:53Z - 2023-05-16T15:26:01Z - 217 - 2023-05-16T15:26:01Z - 2023-05-16T15:26:53Z - 0 - 0 - 2023-05-16T15:26:53Z - - - 15034 - Confirmed - 2 - 53 - udp - 0 - - Active - 2023-05-16T15:26:53Z - 2023-05-16T15:26:01Z - 217 - 2023-05-16T15:26:01Z - 2023-05-16T15:26:53Z - 0 - 0 - 2023-05-16T15:26:53Z - - - 15068 - Confirmed - 2 - 53 - udp - 0 - - Active - 2023-05-14T15:04:55Z - 2023-05-16T15:26:01Z - 216 - 2023-05-16T15:26:01Z - 2023-05-16T15:26:53Z - 2022-06-19T18:31:48Z - 0 - 0 - 2023-05-16T15:26:53Z - - - 38628 - Confirmed - 3 - 443 - tcp - 1 - - Active - 2023-05-14T15:04:55Z - 2023-05-16T15:26:01Z - 217 - 2023-05-16T15:26:01Z - 2023-05-16T15:26:53Z - 0 - 0 - 2023-05-16T15:26:53Z - - - 38794 - Confirmed - 3 - 443 - tcp - 1 - - Active - 2023-05-14T15:04:55Z - 2023-05-16T15:26:01Z - 213 - 2023-05-16T15:26:01Z - 2023-05-16T15:26:53Z - 0 - 0 - 2023-05-16T15:26:53Z - - - - - 10112200 - 1.2.2.2 - IP - - - - - - - 2023-03-21T03:43:11Z - 2023-03-21T03:42:05Z - 404 - - - 38726 - Potential - 3 - 0 - - New - 2023-05-16T15:26:53Z - 2023-03-21T03:42:05Z - 1 - 2023-03-21T03:42:05Z - 2023-03-21T03:43:11Z - 0 - 0 - 2023-03-21T03:43:11Z - - - 38739 - Confirmed - 3 - 22 - tcp - 0 - - New - 2023-03-21T03:42:05Z - 2023-03-21T03:42:05Z - 1 - 2023-03-21T03:42:05Z - 2023-03-21T03:43:11Z - 0 - 0 - 2023-03-21T03:43:11Z - - - - - - - - diff --git a/Packs/qualys/Integrations/QualysEventCollector/test_data/vulnerabilities_raw.xml b/Packs/qualys/Integrations/QualysEventCollector/test_data/vulnerabilities_raw.xml deleted file mode 100644 index 7dd200b979de..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/test_data/vulnerabilities_raw.xml +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - 2024-02-12T15:21:40Z - - - 10052 - Vulnerability - 3 - <![CDATA[Vtecrm Vtenext Multiple Security Vulnerabilities]]> - CGI - 2023-12-19T12:02:30Z - 2021-01-21T12:51:22Z - 0 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1 - - - - - - - - 1 - Exploit Available - - - - 10186 - Vulnerability - 2 - <![CDATA[]]> - CGI - 2024-02-10T12:03:39Z - 2000-11-10T11:00:00Z - - - - - - - 1 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1 - - - - - 1 - Patch Available, Exploit Available - - - - - - - diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_dark.svg b/Packs/qualys/Integrations/Qualysv2/Qualysv2_dark.svg similarity index 100% rename from 
Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_dark.svg rename to Packs/qualys/Integrations/Qualysv2/Qualysv2_dark.svg diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_light.svg b/Packs/qualys/Integrations/Qualysv2/Qualysv2_light.svg similarity index 100% rename from Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_light.svg rename to Packs/qualys/Integrations/Qualysv2/Qualysv2_light.svg diff --git a/Packs/qualys/Integrations/Qualysv2/README.md b/Packs/qualys/Integrations/Qualysv2/README.md index 12831fcde09b..c14de519ccda 100644 --- a/Packs/qualys/Integrations/Qualysv2/README.md +++ b/Packs/qualys/Integrations/Qualysv2/README.md @@ -1,4 +1,4 @@ -Qualys Vulnerability Management lets you create, run, fetch and manage reports, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance. +Qualys VMDR lets you create, run, fetch and manage reports, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance. This integration was integrated and tested with version 2.0 of QualysVulnerabilityManagement ## Changes compared to V1 @@ -59,13 +59,15 @@ This integration was integrated and tested with version 2.0 of QualysVulnerabili 2. Search for Qualys v2. 3. Click **Add instance** to create and configure a new integration instance. - | **Parameter** | **Required** | - | --- | --- | - | Server URL | True | - | Username | True | - | Password | True | + | **Parameter** | **Required** | + |------------------------------------| --- | + | Server URL | True | + | Username | True | + | Password | True | | Trust any certificate (not secure) | False | - | Use system proxy settings | False | + | Use system proxy settings | False | + | First fetch time | True | + | Fetch event Limit | True | 4. Click **Test** to validate the URLs, token, and connection. From 31cdfd9ca90af7e32a91343b00ba3b0a3461c7ae Mon Sep 17 00:00:00 2001 From: Moshe Date: Wed, 13 Mar 2024 14:30:45 +0200 Subject: [PATCH 06/16] update redame, rn --- .../qualys/Integrations/Qualysv2/Qualysv2.py | 73 +++++++++---------- Packs/qualys/Integrations/Qualysv2/README.md | 7 +- Packs/qualys/ReleaseNotes/3_0_0.md | 8 ++ Packs/qualys/pack_metadata.json | 2 +- 4 files changed, 49 insertions(+), 41 deletions(-) create mode 100644 Packs/qualys/ReleaseNotes/3_0_0.md diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py index ad5c5311c711..2f805bd42079 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py @@ -3032,48 +3032,37 @@ def get_activity_logs_events_command(client, args, first_fetch_time): return limited_activity_logs_events, results -# def test_module(client: Client, params: dict[str, Any], first_fetch_time: str) -> str: -# """ -# Tests API connectivity and authentication' -# When 'ok' is returned it indicates the integration works like it is supposed to and connection to the service is -# successful. -# Raises exceptions if something goes wrong. -# Args: -# client (Client): HelloWorld client to use. -# params (Dict): Integration parameters. -# first_fetch_time (int): The first fetch time as configured in the integration params. -# Returns: -# str: 'ok' if test passed, anything else will raise an exception and will fail the test. 
-# """ -# fetch_events( -# client=client, -# last_run={}, -# first_fetch_time=first_fetch_time, -# max_fetch=1, -# fetch_function=get_activity_logs_events, -# newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, -# next_page_field=ACTIVITY_LOGS_NEXT_PAGE, -# previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, -# ) -# -# fetch_assets(client=client) -# -# return 'ok' - -@logger -def test_module(client: Client) -> str: +def test_module(client: Client, params: dict[str, Any], first_fetch_time: str) -> str: """ - Makes a http request to qualys API in order to test the connection + Tests API connectivity and authentication' + When 'ok' is returned it indicates the integration works like it is supposed to and connection to the service is + successful. + Raises exceptions if something goes wrong. Args: - client: Client object for making a http request + client (Client): HelloWorld client to use. + params (Dict): Integration parameters. + first_fetch_time (int): The first fetch time as configured in the integration params. Returns: - 'ok' message if the connection test was successful - Raises: - DemistoException: will be raised when connection was not successful by command_http_request + str: 'ok' if test passed, anything else will raise an exception and will fail the test. """ build_args_dict(None, COMMANDS_ARGS_DATA["test-module"], False) client.command_http_request(COMMANDS_API_DATA["test-module"]) - return "ok" + + if params.get('isFetchEvents'): + fetch_events( + client=client, + last_run={}, + first_fetch_time=first_fetch_time, + max_fetch=1, + fetch_function=get_activity_logs_events, + newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, + next_page_field=ACTIVITY_LOGS_NEXT_PAGE, + previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, + ) + if params.get('isFetchAssets'): + fetch_assets(client=client) + + return 'ok' @logger @@ -3407,7 +3396,7 @@ def main(): # pragma: no cover first_fetch_str = first_fetch_datetime.strftime(DATE_FORMAT) if command == "test-module": - text_res = test_module(client) + text_res = test_module(client, params, first_fetch_str) return_results(text_res) elif command == "qualys-get-events": @@ -3417,6 +3406,14 @@ def main(): # pragma: no cover if should_push_events: send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT) + elif command == "qualys-get-assets": + should_push_events = argToBoolean(args.get('should_push_events', False)) + assets, vulnerabilities = fetch_assets(client=client) + return_results(f'Pulled {len(assets)} assets, and {len(vulnerabilities)} vulnerabilities from API') + if should_push_events: + send_data_to_xsiam(data=assets, vendor=VENDOR, product='host_detections', data_type='assets') + send_data_to_xsiam(data=vulnerabilities, vendor=VENDOR, product='vulnerabilities', data_type='assets') + elif command == 'fetch-events': last_run = demisto.getLastRun() max_fetch_activity_logs = arg_to_number(params.get("max_fetch_activity_logs", 0)) @@ -3443,7 +3440,7 @@ def main(): # pragma: no cover execution_start_time = time.time() assets, vulnerabilities = fetch_assets(client=client) send_data_to_xsiam(data=assets, vendor=VENDOR, product='host_detections', data_type='assets') - send_data_to_xsiam(data=vulnerabilities, vendor=VENDOR, product='vulnerabilities') + send_data_to_xsiam(data=vulnerabilities, vendor=VENDOR, product='vulnerabilities', data_type='assets') demisto.setAssetsLastRun({'assets_last_fetch': execution_start_time}) else: diff --git a/Packs/qualys/Integrations/Qualysv2/README.md 
b/Packs/qualys/Integrations/Qualysv2/README.md index c14de519ccda..8cfabd241b8f 100644 --- a/Packs/qualys/Integrations/Qualysv2/README.md +++ b/Packs/qualys/Integrations/Qualysv2/README.md @@ -53,10 +53,10 @@ This integration was integrated and tested with version 2.0 of QualysVulnerabili 11. New playbook - qualys-report-launch-compliance-policy-and-fetch.yml -## Configure Qualys v2 on Cortex XSOAR +## Configure Qualys VMDR on Cortex XSOAR 1. Navigate to **Settings** > **Integrations** > **Servers & Services**. -2. Search for Qualys v2. +2. Search for Qualys VMDR. 3. Click **Add instance** to create and configure a new integration instance. | **Parameter** | **Required** | @@ -71,6 +71,9 @@ This integration was integrated and tested with version 2.0 of QualysVulnerabili 4. Click **Test** to validate the URLs, token, and connection. +## Notes: +- ***Fetch assets and vulnerabilities*** command fetches assets and vulnerabilities from the last 90 days only. + ## Asset Tag Commands There are several API endpoints on the Qualys API that can be used in the QualysV2 integration configuration as the `SERVER URL` parameter. When using `asset-tag` commands, the [official documentation](https://www.qualys.com/docs/qualys-asset-management-tagging-api-v2-user-guide.pdf) recommends that the `SERVER URL` parameter should be in the following format: `https://qualysapi..apps.qualys.com/`. diff --git a/Packs/qualys/ReleaseNotes/3_0_0.md b/Packs/qualys/ReleaseNotes/3_0_0.md new file mode 100644 index 000000000000..f1946cccba6a --- /dev/null +++ b/Packs/qualys/ReleaseNotes/3_0_0.md @@ -0,0 +1,8 @@ + +#### Integrations +##### Qualys VMDR (formerly QualysV2) +- Updated the Docker image to: *demisto/python3:3.10.13.87159*. +- Updated the display name of the **QualysV2** integration to **Qualys VMDR**. +- Added the following commands: + - ***fetch-events*** command to fetch audit logs. + - ***fetch-assets*** command to fetch assets and vulnerabilities. diff --git a/Packs/qualys/pack_metadata.json b/Packs/qualys/pack_metadata.json index 3720a9d68b7d..4bf840be3a18 100644 --- a/Packs/qualys/pack_metadata.json +++ b/Packs/qualys/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Qualys", "description": "Qualys Vulnerability Management let's you create, run, fetch and manage reports, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance", "support": "xsoar", - "currentVersion": "2.0.11", + "currentVersion": "3.0.0", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", From 8e328a05ade1b6c408fcc6a3da75d110c7985a18 Mon Sep 17 00:00:00 2001 From: Moshe Date: Thu, 21 Mar 2024 09:26:07 +0200 Subject: [PATCH 07/16] update rn. 
and added qualys-get-assets to yml --- Packs/qualys/Integrations/Qualysv2/Qualysv2.py | 2 +- Packs/qualys/Integrations/Qualysv2/Qualysv2.yml | 11 +++++++++++ Packs/qualys/ReleaseNotes/3_0_0.md | 10 ++++++---- 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py index 2f805bd42079..3ec828e4061e 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py @@ -3407,7 +3407,7 @@ def main(): # pragma: no cover send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT) elif command == "qualys-get-assets": - should_push_events = argToBoolean(args.get('should_push_events', False)) + should_push_events = argToBoolean(args.get('should_push_assets', False)) assets, vulnerabilities = fetch_assets(client=client) return_results(f'Pulled {len(assets)} assets, and {len(vulnerabilities)} vulnerabilities from API') if should_push_events: diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml index 14968459796c..2e5771e9c0ae 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml @@ -2800,6 +2800,17 @@ script: name: offset description: Gets activity logs from Qualys. name: qualys-get-events + - arguments: + - auto: PREDEFINED + defaultValue: 'false' + description: If true, the command will create assets, otherwise it will only display the amount of available assets. + name: should_push_assets + predefined: + - 'true' + - 'false' + required: true + description: Gets assets from Qualys. + name: qualys-get-assets dockerimage: demisto/python3:3.10.13.87159 isfetchevents: true isfetchassets: true diff --git a/Packs/qualys/ReleaseNotes/3_0_0.md b/Packs/qualys/ReleaseNotes/3_0_0.md index f1946cccba6a..d5dba2689745 100644 --- a/Packs/qualys/ReleaseNotes/3_0_0.md +++ b/Packs/qualys/ReleaseNotes/3_0_0.md @@ -2,7 +2,9 @@ #### Integrations ##### Qualys VMDR (formerly QualysV2) - Updated the Docker image to: *demisto/python3:3.10.13.87159*. -- Updated the display name of the **QualysV2** integration to **Qualys VMDR**. -- Added the following commands: - - ***fetch-events*** command to fetch audit logs. - - ***fetch-assets*** command to fetch assets and vulnerabilities. +- Renamed the integration from ***Qualys v2*** to ***Qualys VMDR***. 
+- Added the following commands: + - ***fetch-events*** + - ***fetch-assets*** + - ***qualys-get-events*** + - ***qualys-get-assets*** From c0703f7bc75b17e849b60d26a35cbc0e77102255 Mon Sep 17 00:00:00 2001 From: Moshe Date: Thu, 21 Mar 2024 10:47:48 +0200 Subject: [PATCH 08/16] fixed --- Packs/qualys/Integrations/Qualysv2/Qualysv2.py | 8 ++++---- Packs/qualys/Integrations/Qualysv2/Qualysv2.yml | 6 +++--- Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py | 6 ++++-- Packs/qualys/ReleaseNotes/3_0_0.md | 4 ++-- 4 files changed, 13 insertions(+), 11 deletions(-) diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py index 3ec828e4061e..f1a9a98832b0 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py @@ -2904,8 +2904,8 @@ def get_host_list_detections_events(client, since_datetime) -> list: Returns: Host list detections assets """ - demisto.debug(f'Starting to fetch assets') - assets = [] + demisto.debug('Starting to fetch assets') + assets = List[Any] next_page = '' while True: @@ -2947,8 +2947,8 @@ def fetch_assets(client): Return: event: events to push to xsiam """ - demisto.debug(f'Starting fetch for assets') - since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) + demisto.debug('Starting fetch for assets') + since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) # type: ignore[union-attr] assets = get_host_list_detections_events(client, since_datetime) vulnerabilities = get_vulnerabilities(client, since_datetime) diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml index 2e5771e9c0ae..b2f0409d7f28 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml @@ -35,14 +35,14 @@ configuration: display: First event fetch time name: first_fetch additionalinfo: If "First event fetch time" is set for a long time ago, it may cause performance issues. - required: true + required: false type: 0 - defaultvalue: 10000 section: Collect display: Event Fetch Limit name: max_fetch_activity_logs additionalinfo: Maximum number of events to fetch per fetch iteration. - required: true + required: false type: 0 description: Qualys Vulnerability Management lets you create, run, fetch and manage reports, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance. display: Qualys VMDR @@ -2811,7 +2811,7 @@ script: required: true description: Gets assets from Qualys. 
name: qualys-get-assets - dockerimage: demisto/python3:3.10.13.87159 + dockerimage: demisto/python3:3.10.14.90585 isfetchevents: true isfetchassets: true runonce: false diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py index bf978b44eaa1..6b2cf36f4478 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py @@ -154,10 +154,12 @@ def test_fetch_assets_command(requests_mock): with open('./test_data/vulnerabilities_raw.xml') as f: vulnerabilities = f.read() requests_mock.get(f'{base_url}api/2.0/fo/asset/host/vm/detection/' - f'?action=list&truncation_limit=3&vm_scan_date_after={arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}', text=assets) + f'?action=list&truncation_limit=3&vm_scan_date_after=' + f'{arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}', text=assets) requests_mock.post(f'{base_url}api/2.0/fo/knowledge_base/vuln/' - f'?action=list&last_modified_after={arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}', text=vulnerabilities) + f'?action=list&last_modified_after={arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}', + text=vulnerabilities) client = Client(base_url=base_url, verify=True, diff --git a/Packs/qualys/ReleaseNotes/3_0_0.md b/Packs/qualys/ReleaseNotes/3_0_0.md index d5dba2689745..5b53d7b4bcd1 100644 --- a/Packs/qualys/ReleaseNotes/3_0_0.md +++ b/Packs/qualys/ReleaseNotes/3_0_0.md @@ -1,7 +1,7 @@ #### Integrations -##### Qualys VMDR (formerly QualysV2) -- Updated the Docker image to: *demisto/python3:3.10.13.87159*. +##### Qualys VMDR +- Updated the Docker image to: *demisto/python3:3.10.14.90585*. - Renamed the integration from ***Qualys v2*** to ***Qualys VMDR***. 
- Added the following commands: - ***fetch-events*** From 013ef1f994c4261e509fdeac76cd9933a9551829 Mon Sep 17 00:00:00 2001 From: Moshe Date: Tue, 26 Mar 2024 11:20:46 +0200 Subject: [PATCH 09/16] fixed mypy --- Packs/qualys/.pack-ignore | 3 +++ Packs/qualys/Integrations/Qualysv2/Qualysv2.py | 8 ++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/Packs/qualys/.pack-ignore b/Packs/qualys/.pack-ignore index 9e14ecfc4e01..95e8e339c6e7 100644 --- a/Packs/qualys/.pack-ignore +++ b/Packs/qualys/.pack-ignore @@ -9,3 +9,6 @@ ignore=IM111 [file:QualysCreateIncidentFromReport.yml] ignore=SC105 + +[file:Qualysv2.yml] +ignore=IN150,IN161 diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py index f1a9a98832b0..7c530775fc7b 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py @@ -3,7 +3,6 @@ from CommonServerPython import * # noqa: F401 from collections.abc import Callable from typing import Any -import urllib3 import csv import io import requests @@ -2757,10 +2756,10 @@ def handle_host_list_detection_result(raw_response: requests.Response) -> tuple[ if isinstance(response_requested_value, dict): response_requested_value = [response_requested_value] - return response_requested_value, response_next_url + return response_requested_value, str(response_next_url) -def handle_vulnerabilities_result(raw_response: requests.Response) -> tuple[Optional[list], Optional[str]]: +def handle_vulnerabilities_result(raw_response: requests.Response) -> list: """ Handles vulnerabilities response - parses xml to json and gets the list Args: @@ -2945,7 +2944,8 @@ def fetch_assets(client): Args: client: command client Return: - event: events to push to xsiam + assets: assets to push to xsiam + vulnerabilities: vulnerabilities to push to xsiam """ demisto.debug('Starting fetch for assets') since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) # type: ignore[union-attr] From 8d95aa663c4ce65f1019c4602361bcea39334a99 Mon Sep 17 00:00:00 2001 From: Moshe Date: Tue, 26 Mar 2024 11:33:58 +0200 Subject: [PATCH 10/16] changed the limit --- Packs/qualys/Integrations/Qualysv2/Qualysv2.py | 5 +++-- Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py index 7c530775fc7b..eb68fa7af1e4 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py @@ -29,6 +29,7 @@ HOST_LAST_FETCH = 'host_last_fetch' ASSETS_FETCH_FROM = '90 days' MIN_ASSETS_INTERVAL = 59 +HOST_LIMIT = 1000 ASSETS_DATE_FORMAT = '%Y-%m-%d' DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ" # ISO8601 format with UTC, default in XSOAR @@ -1670,7 +1671,7 @@ def get_host_list_detection(self, since_datetime, next_page=None) -> Union[str, """ self._headers.update({"Content-Type": 'application/json'}) params: dict[str, Any] = { - "truncation_limit": 3, + "truncation_limit": HOST_LIMIT, "vm_scan_date_after": since_datetime } if next_page: @@ -2904,7 +2905,7 @@ def get_host_list_detections_events(client, since_datetime) -> list: Host list detections assets """ demisto.debug('Starting to fetch assets') - assets = List[Any] + assets = [] next_page = '' while True: diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py index 6b2cf36f4478..2a51fb876c23 100644 --- 
a/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py
+++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py
@@ -23,7 +23,7 @@
     get_simple_response_from_raw,
     validate_required_group,
     get_activity_logs_events_command,
-    fetch_events, get_activity_logs_events, fetch_assets, ASSETS_FETCH_FROM, ASSETS_DATE_FORMAT
+    fetch_events, get_activity_logs_events, fetch_assets, ASSETS_FETCH_FROM, ASSETS_DATE_FORMAT, HOST_LIMIT
 )
 from CommonServerPython import *  # noqa: F401
 
@@ -154,7 +154,7 @@ def test_fetch_assets_command(requests_mock):
     with open('./test_data/vulnerabilities_raw.xml') as f:
         vulnerabilities = f.read()
     requests_mock.get(f'{base_url}api/2.0/fo/asset/host/vm/detection/'
-                      f'?action=list&truncation_limit=3&vm_scan_date_after='
+                      f'?action=list&truncation_limit={HOST_LIMIT}&vm_scan_date_after='
                       f'{arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}', text=assets)
 
     requests_mock.post(f'{base_url}api/2.0/fo/knowledge_base/vuln/'
                        f'?action=list&last_modified_after={arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}',
                        text=vulnerabilities)

From e014954db3eaec2cbfef7ed486dd0a0fb6df74e7 Mon Sep 17 00:00:00 2001
From: Moshe
Date: Tue, 26 Mar 2024 11:50:23 +0200
Subject: [PATCH 11/16] update readme

---
 Packs/qualys/Integrations/Qualysv2/README.md | 44 +++++++++++++++++++-
 1 file changed, 43 insertions(+), 1 deletion(-)

diff --git a/Packs/qualys/Integrations/Qualysv2/README.md b/Packs/qualys/Integrations/Qualysv2/README.md
index 8cfabd241b8f..d2051ba9b5e6 100644
--- a/Packs/qualys/Integrations/Qualysv2/README.md
+++ b/Packs/qualys/Integrations/Qualysv2/README.md
@@ -22067,4 +22067,46 @@ There is no context output for this command.
 
 #### Human Readable Output
 
->Successfully purged 1 record
\ No newline at end of file
+>Successfully purged 1 record
+
+
+### qualys-get-events
+***
+Manual command to fetch events from Qualys and display them.
+
+
+#### Base Command
+
+`qualys-get-events`
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| should_push_events | If true, the command will create events, otherwise it will only display them. Default is false. | Required |
+| limit | Maximum number of events to return. | Optional |
+| since_datetime | Date to return results from. | Optional |
+| offset | Offset which events to return. | Optional |
+
+
+#### Context Output
+
+There is no context output for this command.
+
+### qualys-get-assets
+***
+Manual command to fetch assets from Qualys and display them.
+
+
+#### Base Command
+
+`qualys-get-assets`
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| should_push_assets | If true, the command will create assets, otherwise it will only display the amount of available assets. Default is false. | Required |
+
+
+#### Context Output
+
+There is no context output for this command.
\ No newline at end of file From 8540b4c7cee61ce59b4851ceff998275d1ae84d9 Mon Sep 17 00:00:00 2001 From: Moshe Date: Tue, 26 Mar 2024 16:17:16 +0200 Subject: [PATCH 12/16] mypy --- Packs/qualys/Integrations/Qualysv2/Qualysv2.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py index eb68fa7af1e4..83c41a303b08 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py @@ -2737,7 +2737,7 @@ def get_next_page_activity_logs(footer): return max_id -def handle_host_list_detection_result(raw_response: requests.Response) -> tuple[Optional[list], Optional[str]]: +def handle_host_list_detection_result(raw_response: requests.Response) -> tuple[list, Optional[str]]: """ Handles Host list detection response - parses xml to json and gets the list Args: @@ -2905,7 +2905,7 @@ def get_host_list_detections_events(client, since_datetime) -> list: Host list detections assets """ demisto.debug('Starting to fetch assets') - assets = [] + assets = [] # type: ignore[var-annotated] next_page = '' while True: From 7d5b66460e8a963c5681267de6485629d7f8574c Mon Sep 17 00:00:00 2001 From: Moshe Date: Wed, 27 Mar 2024 18:43:59 +0200 Subject: [PATCH 13/16] added breaking Changes --- Packs/qualys/ReleaseNotes/3_0_0.json | 1 + Packs/qualys/ReleaseNotes/3_0_0.md | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 Packs/qualys/ReleaseNotes/3_0_0.json diff --git a/Packs/qualys/ReleaseNotes/3_0_0.json b/Packs/qualys/ReleaseNotes/3_0_0.json new file mode 100644 index 000000000000..82a24efeca3e --- /dev/null +++ b/Packs/qualys/ReleaseNotes/3_0_0.json @@ -0,0 +1 @@ +{"breakingChanges":true,"breakingChangesNotes":"The ***Qualys Event Collector*** was removed from this pack. To fetch events and assets from Qualys, use the ***Qualys VMDR*** integration instead. Renamed the integration from ***Qualys v2*** to ***Qualys VMDR***."} diff --git a/Packs/qualys/ReleaseNotes/3_0_0.md b/Packs/qualys/ReleaseNotes/3_0_0.md index 5b53d7b4bcd1..c148fd2bd57f 100644 --- a/Packs/qualys/ReleaseNotes/3_0_0.md +++ b/Packs/qualys/ReleaseNotes/3_0_0.md @@ -1,10 +1,11 @@ #### Integrations ##### Qualys VMDR +- **Breaking Change**: The ***Qualys Event Collector*** was removed from this pack. To fetch events and assets from Qualys, use the ***Qualys VMDR*** integration instead. - Updated the Docker image to: *demisto/python3:3.10.14.90585*. - Renamed the integration from ***Qualys v2*** to ***Qualys VMDR***. 
- Added the following commands: - ***fetch-events*** - ***fetch-assets*** - ***qualys-get-events*** - - ***qualys-get-assets*** + - ***qualys-get-assets*** From 04de032a4c3237a575ead3d7c06070fa7a15ebec Mon Sep 17 00:00:00 2001 From: Moshe Date: Sun, 31 Mar 2024 15:53:49 +0300 Subject: [PATCH 14/16] added since_datetime to test_module --- Packs/qualys/Integrations/Qualysv2/Qualysv2.py | 9 ++++++--- Packs/qualys/Integrations/Qualysv2/Qualysv2.yml | 2 ++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py index 83c41a303b08..2cb5a56e412e 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py @@ -2940,16 +2940,18 @@ def get_vulnerabilities(client, since_datetime) -> list: return vulnerabilities -def fetch_assets(client): +def fetch_assets(client, since_datetime=None): """ Fetches host list detections Args: client: command client + since_datetime: The start fetch date. Return: assets: assets to push to xsiam vulnerabilities: vulnerabilities to push to xsiam """ demisto.debug('Starting fetch for assets') - since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) # type: ignore[union-attr] + if not since_datetime: + since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT) # type: ignore[union-attr] assets = get_host_list_detections_events(client, since_datetime) vulnerabilities = get_vulnerabilities(client, since_datetime) @@ -3061,7 +3063,8 @@ def test_module(client: Client, params: dict[str, Any], first_fetch_time: str) - previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, ) if params.get('isFetchAssets'): - fetch_assets(client=client) + since_datetime = arg_to_datetime('3 days').strftime(ASSETS_DATE_FORMAT) # type: ignore[union-attr] + fetch_assets(client=client, since_datetime=since_datetime) return 'ok' diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml index b2f0409d7f28..0864e35a5eb8 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml @@ -36,12 +36,14 @@ configuration: name: first_fetch additionalinfo: If "First event fetch time" is set for a long time ago, it may cause performance issues. required: false + advanced: true type: 0 - defaultvalue: 10000 section: Collect display: Event Fetch Limit name: max_fetch_activity_logs additionalinfo: Maximum number of events to fetch per fetch iteration. + advanced: true required: false type: 0 description: Qualys Vulnerability Management lets you create, run, fetch and manage reports, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance. 
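The change in PATCH 14 above boils down to making the assets lookback window injectable: fetch_assets keeps the 90-day ASSETS_FETCH_FROM default when no since_datetime is passed, while test_module hands it a 3-day window so the connectivity test stays cheap. Below is a minimal, standalone sketch of that fallback pattern; the helper name resolve_since_datetime and the two constants are illustrative only, and the integration itself uses arg_to_datetime from CommonServerPython rather than raw datetime arithmetic.

```python
from datetime import datetime, timedelta

ASSETS_DATE_FORMAT = '%Y-%m-%d'
DEFAULT_LOOKBACK_DAYS = 90   # mirrors ASSETS_FETCH_FROM = '90 days'
TEST_LOOKBACK_DAYS = 3       # the narrow window test_module uses


def resolve_since_datetime(since_datetime: str | None = None,
                           lookback_days: int = DEFAULT_LOOKBACK_DAYS) -> str:
    """Return the caller-supplied date, otherwise fall back to a relative lookback window."""
    if since_datetime:
        return since_datetime
    return (datetime.utcnow() - timedelta(days=lookback_days)).strftime(ASSETS_DATE_FORMAT)


# fetch_assets-style call: no explicit date, so the 90-day default applies.
print(resolve_since_datetime())
# test_module-style call: a short window keeps the API round trip small.
print(resolve_since_datetime(lookback_days=TEST_LOOKBACK_DAYS))
```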
From 0139c9c159fc877a9d94d5f7f255249242d85984 Mon Sep 17 00:00:00 2001 From: Moshe Date: Wed, 3 Apr 2024 11:20:17 +0300 Subject: [PATCH 15/16] description --- Packs/qualys/Integrations/Qualysv2/Qualysv2.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml index 0864e35a5eb8..beb21139ed95 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml @@ -32,7 +32,7 @@ configuration: advanced: true - defaultvalue: 3 days section: Collect - display: First event fetch time + display: Event first fetch time name: first_fetch additionalinfo: If "First event fetch time" is set for a long time ago, it may cause performance issues. required: false @@ -46,7 +46,7 @@ configuration: advanced: true required: false type: 0 -description: Qualys Vulnerability Management lets you create, run, fetch and manage reports, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance. +description: Qualys Vulnerability Management lets you create, run, manage reports and to fetch Activity Logs, Assets and Vulnerabilities, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance. display: Qualys VMDR name: QualysV2 script: From 512b6d0606cf0cd5f298e2d2954cbfe751780ad3 Mon Sep 17 00:00:00 2001 From: Moshe Date: Thu, 4 Apr 2024 09:41:35 +0300 Subject: [PATCH 16/16] fetch interval and dataset name --- Packs/qualys/Integrations/Qualysv2/Qualysv2.py | 2 +- Packs/qualys/Integrations/Qualysv2/Qualysv2.yml | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py index 2cb5a56e412e..7e2ddbce415a 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py @@ -3443,7 +3443,7 @@ def main(): # pragma: no cover return execution_start_time = time.time() assets, vulnerabilities = fetch_assets(client=client) - send_data_to_xsiam(data=assets, vendor=VENDOR, product='host_detections', data_type='assets') + send_data_to_xsiam(data=assets, vendor=VENDOR, product='assets', data_type='assets') send_data_to_xsiam(data=vulnerabilities, vendor=VENDOR, product='vulnerabilities', data_type='assets') demisto.setAssetsLastRun({'assets_last_fetch': execution_start_time}) diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml index beb21139ed95..988215ff9075 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml @@ -46,6 +46,14 @@ configuration: advanced: true required: false type: 0 +- additionalinfo: The fetch interval. It is recommended to set it to 24 hours, and the minimum interval is 1 hour. + defaultvalue: 1440 + display: Assets and Vulnerabilities Fetch Interval + name: assetsFetchInterval + type: 19 + section: Collect + advanced: true + required: false description: Qualys Vulnerability Management lets you create, run, manage reports and to fetch Activity Logs, Assets and Vulnerabilities, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance. display: Qualys VMDR name: QualysV2
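Reading PATCH 15 and PATCH 16 together with the fetch-assets branch added earlier in main(), the final flow is: record the start time, pull host detections and knowledge-base vulnerabilities, push each batch to its own dataset via send_data_to_xsiam (host detections now go under the 'assets' product rather than 'host_detections'), and persist the start time so the next run can respect the configured interval. A condensed sketch of that flow, assuming the XSOAR runtime objects (demisto and send_data_to_xsiam from CommonServerPython) and the integration's own fetch_assets are in scope:

```python
import time

VENDOR = 'qualys'


def run_fetch_assets(client):
    # Capture the start time first; it is persisted below so the next run can
    # enforce the minimum gap between asset fetches.
    execution_start_time = time.time()

    # fetch_assets returns the host detections and the knowledge-base vulnerabilities.
    assets, vulnerabilities = fetch_assets(client=client)

    # After PATCH 16, host detections are sent under the 'assets' product instead of 'host_detections'.
    send_data_to_xsiam(data=assets, vendor=VENDOR, product='assets', data_type='assets')
    send_data_to_xsiam(data=vulnerabilities, vendor=VENDOR, product='vulnerabilities', data_type='assets')

    demisto.setAssetsLastRun({'assets_last_fetch': execution_start_time})
```

With the default assetsFetchInterval of 1440 minutes, assets and vulnerabilities are refreshed roughly once a day, and the parameter's description notes that fetches should not run closer than one hour apart.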