diff --git a/Packs/qualys/.pack-ignore b/Packs/qualys/.pack-ignore index 9e14ecfc4e01..95e8e339c6e7 100644 --- a/Packs/qualys/.pack-ignore +++ b/Packs/qualys/.pack-ignore @@ -9,3 +9,6 @@ ignore=IM111 [file:QualysCreateIncidentFromReport.yml] ignore=SC105 + +[file:Qualysv2.yml] +ignore=IN150,IN161 diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py deleted file mode 100644 index ab2b414e7821..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.py +++ /dev/null @@ -1,642 +0,0 @@ -import copy -import demistomock as demisto # noqa: F401 -from CommonServerPython import * # noqa: F401 -from typing import Any -import urllib3 -import csv -import io - -# Disable insecure warnings -urllib3.disable_warnings() - -""" CONSTANTS """ - -DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' -API_SUFFIX = "/api/2.0/fo/" -VENDOR = 'qualys' -PRODUCT = 'qualys' -BEGIN_RESPONSE_LOGS_CSV = "----BEGIN_RESPONSE_BODY_CSV" -END_RESPONSE_LOGS_CSV = "----END_RESPONSE_BODY_CSV" -BEGIN_RESPONSE_FOOTER_CSV = "----BEGIN_RESPONSE_FOOTER_CSV" -END_RESPONSE_FOOTER_CSV = "----END_RESPONSE_FOOTER_CSV" -WARNING = 'WARNING' -ACTIVITY_LOGS_NEWEST_EVENT_DATETIME = 'activity_logs_newest_event_datetime' -ACTIVITY_LOGS_NEXT_PAGE = 'activity_logs_next_page' -ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN = 'activity_logs_since_datetime_prev_run' -HOST_DETECTIONS_NEWEST_EVENT_DATETIME = 'host_detections_newest_event_datetime' -HOST_DETECTIONS_NEXT_PAGE = 'host_detections_next_page' -HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN = 'host_detections_since_datetime_prev_run' -HOST_LAST_FETCH = 'host_last_fetch' - -""" CLIENT CLASS """ - - -class Client(BaseClient): - def __init__(self, base_url, username, password, verify=True, proxy=False, headers=None): - super().__init__(base_url, verify=verify, proxy=proxy, headers=headers, auth=(username, password)) - - @staticmethod - def error_handler(res): - """ Handles error on API request to Qyalys """ - err_msg = f"Error in API call [{res.status_code}] - {res.reason}" - try: - simple_response = get_simple_response_from_raw(parse_raw_response(res.text)) - err_msg = f'{err_msg}\nError Code: {simple_response.get("CODE")}\nError Message: {simple_response.get("TEXT")}' - except Exception: - raise DemistoException(err_msg, res=res) - - def get_user_activity_logs(self, since_datetime: str, max_fetch: int = 0, next_page=None) -> Union[str, bytes]: - """ - Make a http request to Qualys API to get user activities logs - Args: - Returns: - response from Qualys API - Raises: - DemistoException: can be raised by the _http_request function - """ - self._headers.update({"Content-Type": 'application/json'}) - params: dict[str, Any] = { - "truncation_limit": max_fetch - } - if since_datetime: - params["since_datetime"] = since_datetime - if next_page: - params["id_max"] = next_page - - response = self._http_request( - method='GET', - url_suffix=urljoin(API_SUFFIX, 'activity_log/?action=list'), - resp_type='text/csv', - params=params, - timeout=60, - error_handler=self.error_handler, - ) - - return response.text - - def get_host_list_detection(self, since_datetime: str, max_fetch: int = 0, next_page=None) -> Union[str, bytes]: - """ - Make a http request to Qualys API to get user activities logs - Args: - Returns: - response from Qualys API - Raises: - DemistoException: can be raised by the _http_request function - """ - self._headers.update({"Content-Type": 'application/json'}) - params: dict[str, Any] = { 
- "truncation_limit": max_fetch - } - if since_datetime: - params["vm_scan_date_after"] = since_datetime - if next_page: - params["id_min"] = next_page - - response = self._http_request( - method='GET', - url_suffix=urljoin(API_SUFFIX, 'asset/host/vm/detection/?action=list'), - resp_type='text', - params=params, - timeout=60, - error_handler=self.error_handler, - ) - - return response - - -def get_partial_response(response: str, start: str, end: str): - """ Cut response string from start to end tokens. - """ - if start not in response or end not in response: - return None - start_index = response.index(start) + len(start) - end_index = response.index(end) - result = response[start_index:end_index].strip() - if result.startswith(WARNING): - result = result.replace(WARNING, '').strip() - return result - - -def csv2json(csv_data: str): - """ Converts data from csv to json - Args: - csv_data: data in csv format - Returns: - the same data in json formal - """ - reader = csv.DictReader(io.StringIO(csv_data)) - json_data = list(reader) - return json_data - - -def get_next_page_from_url(url, field): - """ - Get the next page field from url. - """ - match = re.search(rf"{field}=(\d+)", url) - res = match.group(1) if match else None - return res - - -def get_next_page_activity_logs(footer): - """ - Extracts the next token from activity logs response. - """ - if isinstance(footer, list): - footer = footer[0] - next_url = footer.get('URL', '') - max_id = get_next_page_from_url(next_url, 'id_max') - return max_id - - -def handle_host_list_detection_result(raw_response: requests.Response) -> tuple[Optional[list], Optional[str]]: - """ - Handles Host list detection response - parses xml to json and gets the list - Args: - raw_response (requests.Response): the raw result received from Qualys API command - Returns: - List with data generated for the result given - """ - formatted_response = parse_raw_response(raw_response) - simple_response = get_simple_response_from_raw(formatted_response) - if simple_response and simple_response.get("CODE"): - raise DemistoException(f"\n{simple_response.get('TEXT')} \nCode: {simple_response.get('CODE')}") - - response_requested_value = dict_safe_get(formatted_response, - ["HOST_LIST_VM_DETECTION_OUTPUT", "RESPONSE", "HOST_LIST", "HOST"]) - response_next_url = dict_safe_get(formatted_response, - ["HOST_LIST_VM_DETECTION_OUTPUT", "RESPONSE", "WARNING", "URL"], default_return_value='') - if isinstance(response_requested_value, dict): - response_requested_value = [response_requested_value] - - return response_requested_value, response_next_url - - -def parse_raw_response(response: Union[bytes, requests.Response]) -> dict: - """ - Parses raw response from Qualys. - Load xml as JSON. - Args: - response (Union[bytes, requests.Response]): Response from Qualys service. - - Returns: - (Dict): Dict representing the data returned by Qualys service. - """ - return json.loads(xml2json(response)) - - -def get_simple_response_from_raw(raw_response: Any) -> Union[Any, dict]: - """ - Gets the simple response from a given JSON dict structure returned by Qualys service - If object is not a dict, returns None. - Args: - raw_response (Any): Raw response from Qualys service. - - Returns: - (Union[Any, Dict]): Simple response path if object is a dict, else response as is. 
- """ - simple_response = None - if raw_response and isinstance(raw_response, dict): - simple_response = raw_response.get("SIMPLE_RETURN", {}).get("RESPONSE", {}) - return simple_response - - -def remove_events_before_last_scan(events, last_run): - try: - edited_events = [] - for event in events: - if first_found := event.get('DETECTION', {}).get('FIRST_FOUND_DATETIME'): - if datetime.strptime(first_found, DATE_FORMAT) < datetime.strptime(last_run, DATE_FORMAT): - demisto.debug( - f'Removed event with time: {first_found}, qid: {event.get("DETECTION", {}).get("ID")}') - else: - edited_events.append(event) - return edited_events - except Exception as e: - raise Exception(f'Failed to remove previous events. Error:{str(e)}') - - -def remove_last_events(events, time_to_remove, time_field): - """ Removes events with certain time. - Args: - events: list of events to remove the time from - time_to_remove: remove events with this time - time_field: the field name where the time is - """ - new_events = [] - for event in events: - if event.get(time_field) == time_to_remove: - demisto.debug(f'Removed activity log event with time: {time_to_remove}, log: {event}') - else: - new_events.append(event) - return new_events - - -def add_fields_to_events(events, time_field_path, event_type_field): - """ - Adds the _time key to the events. - Args: - events: List[Dict] - list of events to add the _time key to. - time_field_path: the list of fields to get _time from - event_type_field: type field in order to distinguish between the API's - Returns: - list: The events with the _time key. - """ - if events: - for event in events: - event['_time'] = dict_safe_get(event, time_field_path) - event['event_type'] = event_type_field - - -def get_detections_from_hosts(hosts): - """ - Parses detections from hosts. - Each host contains list of detections: - {'ID':1, - 'IP': '1.1.1.1', - 'LAST_VM_SCANNED_DATE': '01-01-2020', - 'DETECTION_LIST': {'DETECTION': [first_detection_data, second_detection, ...]} - 'additional_fields': ... - } - - The function parses the data in the following way: - {''ID':1, - 'IP': '1.1.1.1', - 'LAST_VM_SCANNED_DATE': '01-01-2020', - 'DETECTION': first_detection_data - 'additional_fields': ... - }, - {'ID':1, - 'IP': '1.1.1.1', - 'LAST_VM_SCANNED_DATE': '01-01-2020', - 'DETECTION': second_detection_data - 'additional_fields': ... - } - .... - - :param hosts: list of hosts that contains detections. - :return: parsed events. - """ - fetched_events = [] - for host in hosts: - if detections_list := host.get('DETECTION_LIST', {}).get('DETECTION'): - if isinstance(detections_list, list): - for detection in detections_list: - new_detection = copy.deepcopy(host) - del new_detection['DETECTION_LIST'] - new_detection['DETECTION'] = detection - fetched_events.append(new_detection) - elif isinstance(detections_list, dict): - new_detection = copy.deepcopy(host) - new_detection['DETECTION'] = detections_list - del new_detection['DETECTION_LIST'] - fetched_events.append(new_detection) - else: - del host['DETECTION_LIST'] - host['DETECTION'] = {} - fetched_events.append(host) - return fetched_events - - -def get_activity_logs_events(client, since_datetime, max_fetch, next_page=None) -> tuple[Optional[list], dict]: - """ Get logs activity from qualys - API response returns events sorted in descending order. We are saving the next_page param and - sending next request with next_page arg if needed. Saving the newest event fetched. - We are deleting the newest event each time to avoid duplication. 
- Args: - client: Qualys client - since_datetime: datetime to get events from - max_fetch: max number of events to return - next_page: pagination marking - Returns: - Logs activity events, Next run datetime - """ - demisto.debug(f'Starting to fetch activity logs events: since_datetime={since_datetime}, next_page={next_page}') - activity_logs = client.get_user_activity_logs(since_datetime=since_datetime, max_fetch=max_fetch, next_page=next_page) - activity_logs_events = csv2json(get_partial_response(activity_logs, BEGIN_RESPONSE_LOGS_CSV, - END_RESPONSE_LOGS_CSV) or activity_logs) or [] - footer_json = csv2json(get_partial_response(activity_logs, BEGIN_RESPONSE_FOOTER_CSV, - END_RESPONSE_FOOTER_CSV)) or {} - new_next_page = get_next_page_activity_logs(footer_json) - demisto.debug(f'Got activity logs events from server: {len(activity_logs_events)=}.') - - newest_event_time = activity_logs_events[0].get('Date') if activity_logs_events else since_datetime - - if not next_page: - activity_logs_events = remove_last_events(activity_logs_events, newest_event_time, 'Date') - add_fields_to_events(activity_logs_events, ['Date'], 'activity_log') - - next_run_dict = { - ACTIVITY_LOGS_NEWEST_EVENT_DATETIME: newest_event_time, - ACTIVITY_LOGS_NEXT_PAGE: new_next_page, - ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN: since_datetime, - } - demisto.debug(f'Done to fetch activity logs events: {next_run_dict=}, sending {len(activity_logs_events)} events.') - return activity_logs_events, next_run_dict - - -def get_host_list_detections_events(client, last_time, max_fetch, next_page=None) -> tuple[Optional[list], dict]: - """ Get host list detections from qualys - We are saving the next_page param and sending next request with next_page arg if needed. Saving the newest event fetched. - We are deleting the newest event each time to avoid duplications. 
- Args: - client: Qualys client - last_time: datetime to get events from - max_fetch: max number of events to return - next_page: pagination marking - Returns: - Host list detections events, Next run datetime - """ - demisto.debug(f'Starting to fetch host list events: last_time={last_time}, next_page={next_page}') - - host_list_detections = client.get_host_list_detection(since_datetime=last_time, max_fetch=max_fetch, next_page=next_page) - host_list_events, next_url = handle_host_list_detection_result(host_list_detections) or [] - newest_event_time = host_list_events[0].get('LAST_VM_SCANNED_DATE') if host_list_events else last_time - - new_next_page = get_next_page_from_url(next_url, 'id_min') - - if newest_event_time == last_time: - edited_host_detections = [] - new_next_page = None - else: - edited_host_detections = get_detections_from_hosts(host_list_events) - demisto.debug(f'Parsed detections from hosts, got {len(edited_host_detections)=} events.') - - edited_host_detections = remove_events_before_last_scan(edited_host_detections, last_time) - - add_fields_to_events(edited_host_detections, ['DETECTION', 'FIRST_FOUND_DATETIME'], 'host_list_detection') - - next_run_dict = { - HOST_LAST_FETCH: datetime.now().strftime(DATE_FORMAT) if not new_next_page else None, - HOST_DETECTIONS_NEWEST_EVENT_DATETIME: newest_event_time, - HOST_DETECTIONS_NEXT_PAGE: new_next_page, - HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN: last_time, - } - demisto.debug(f'Done to fetch host list events: {next_run_dict=}, sending {len(edited_host_detections)} events.') - - return edited_host_detections, next_run_dict - - -def fetch_events(client, last_run, first_fetch_time, fetch_function, newest_event_field, next_page_field, - previous_run_time_field, max_fetch: Optional[int] = 0): - """ Fetches activity logs and host list detections - Args: - client: command client - last_run: last fetch time - first_fetch_time: when start to fetch from - fetch_function: function that gets the events - max_fetch: max number of items to return (0 to return all) - newest_event_field - next_page_field - previous_run_time_field - Return: - next_last_run: where to fetch from next time - event: events to push to xsiam - """ - demisto.debug(f'Starting fetch for {fetch_function.__name__}, last run: {last_run}') - newest_event_time = last_run.get(newest_event_field) if last_run else None - next_page = last_run.get(next_page_field) - previous_time_field = last_run.get(previous_run_time_field) - - if not newest_event_time: - newest_event_time = first_fetch_time - - time_to_fetch = newest_event_time if not next_page else previous_time_field - - events, new_next_run = fetch_function(client, time_to_fetch, max_fetch, next_page) - - updated_next_run = {previous_run_time_field: time_to_fetch} - new_next_page = new_next_run.get(next_page_field) - - # if the fetch is not during the pagination (fetched without next_page) - if not next_page: - # update the newest event - updated_next_run[newest_event_field] = new_next_run.get(newest_event_field) - - # update if there is next page and this fetch is not over - updated_next_run[next_page_field] = new_next_page - - if last_fetch_time := new_next_run.get(HOST_LAST_FETCH): - updated_next_run[HOST_LAST_FETCH] = last_fetch_time - - demisto.info(f"Sending len{len(events)} to XSIAM. 
updated_next_run={updated_next_run}.") - return updated_next_run, events - - -def get_activity_logs_events_command(client, args, first_fetch_time): - """ - Args: - client: command client - args: Demisto args for this command: limit and since_datetime - first_fetch_time: first fetch time - Retuns: - Command results with activity logs - - """ - limit = arg_to_number(args.get('limit', 50)) - offset = arg_to_number(args.get('offset', 0)) - since_datetime = arg_to_datetime(args.get('since_datetime')) - since_datetime = since_datetime.strftime(DATE_FORMAT) if since_datetime else first_fetch_time - activity_logs_events, _ = get_activity_logs_events( - client=client, - since_datetime=since_datetime, - max_fetch=0, - ) - limited_activity_logs_events = activity_logs_events[offset:limit + offset] # type: ignore[index,operator] - activity_logs_hr = tableToMarkdown(name='Activity Logs', t=limited_activity_logs_events) - results = CommandResults( - readable_output=activity_logs_hr, - raw_response=limited_activity_logs_events, - ) - - return limited_activity_logs_events, results - - -def get_host_list_detections_events_command(client, args, first_fetch_time): - """ - Args: - client: command client - args: Demisto args for this command: limit and since_datetime - first_fetch_time: first fetch time - Retuns: - Command results with host list detections - - """ - limit = arg_to_number(args.get('limit', 50)) - offset = arg_to_number(args.get('offset', 0)) - since_datetime = arg_to_datetime(args.get('vm_scan_date_after')) - last_run = since_datetime.strftime(DATE_FORMAT) if since_datetime else first_fetch_time - - host_list_detection_events, _ = get_host_list_detections_events( - client=client, - last_time=last_run, - max_fetch=0, - ) - limited_host_list_detection_events = host_list_detection_events[offset:limit + offset] # type: ignore[index,operator] - host_list_detection_hr = tableToMarkdown(name='Host List Detection', t=limited_host_list_detection_events) - results = CommandResults( - readable_output=host_list_detection_hr, - raw_response=limited_host_list_detection_events, - ) - - return limited_host_list_detection_events, results - - -def test_module(client: Client, params: dict[str, Any], first_fetch_time: str) -> str: - """ - Tests API connectivity and authentication' - When 'ok' is returned it indicates the integration works like it is supposed to and connection to the service is - successful. - Raises exceptions if something goes wrong. - Args: - client (Client): HelloWorld client to use. - params (Dict): Integration parameters. - first_fetch_time (int): The first fetch time as configured in the integration params. - Returns: - str: 'ok' if test passed, anything else will raise an exception and will fail the test. 
- """ - fetch_events( - client=client, - last_run={}, - first_fetch_time=first_fetch_time, - max_fetch=1, - fetch_function=get_activity_logs_events, - newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, - next_page_field=ACTIVITY_LOGS_NEXT_PAGE, - previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, - ) - fetch_events( - client=client, - last_run={}, - first_fetch_time=first_fetch_time, - max_fetch=1, - fetch_function=get_host_list_detections_events, - newest_event_field=HOST_DETECTIONS_NEWEST_EVENT_DATETIME, - next_page_field=HOST_DETECTIONS_NEXT_PAGE, - previous_run_time_field=HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN, - ) - - return 'ok' - - -def should_run_host_detections_fetch(last_run, host_detections_fetch_interval: timedelta, datetime_now: datetime): - """ - - Args: - last_run: last run object. - host_detections_fetch_interval: host detection fetch interval. - datetime_now: time now - - Returns: True if fetch host detections interval time has passed since last time that fetch run. - - """ - if last_fetch_time := last_run.get(HOST_LAST_FETCH): - last_check_time = datetime.strptime(last_fetch_time, DATE_FORMAT) - else: - # never run host detections fetch before - return True - demisto.debug(f'Should run host detections? {last_check_time=}, {host_detections_fetch_interval=}') - return datetime_now - last_check_time > host_detections_fetch_interval - - -""" MAIN FUNCTION """ - - -def main(): # pragma: no cover - params = demisto.params() - args = demisto.args() - command = demisto.command() - - base_url = params.get('url') - verify_certificate = not params.get("insecure", False) - proxy = params.get("proxy", False) - username = params.get("credentials").get("identifier") - password = params.get("credentials").get("password") - - max_fetch_activity_logs = arg_to_number(params.get("max_fetch_activity_logs", 0)) - max_fetch_hosts = arg_to_number(params.get("max_fetch_hosts_detections", 0)) - # How much time before the first fetch to retrieve events - first_fetch_datetime: datetime = arg_to_datetime( # type: ignore[assignment] - arg=params.get('first_fetch', '3 days'), - arg_name='First fetch time', - required=True - ) - - parsed_interval = dateparser.parse(params.get('host_detections_fetch_interval', '12 hours')) or dateparser.parse('12 hours') - host_detections_fetch_interval: timedelta = (datetime.now() - parsed_interval) # type: ignore[operator] - first_fetch_str = first_fetch_datetime.strftime(DATE_FORMAT) - - demisto.info(f'Command being called is {command}') - - try: - headers: dict = {"X-Requested-With": "Cortex XSIAM"} - - client = Client( - base_url=base_url, - username=username, - password=password, - verify=verify_certificate, - headers=headers, - proxy=proxy - ) - - if command == 'test-module': - # This is the call made when pressing the integration Test button. 
- result = test_module(client, params, first_fetch_str) - return_results(result) - - elif command == "qualys-get-activity-logs": - should_push_events = argToBoolean(args.get('should_push_events', False)) - events, results = get_activity_logs_events_command(client, args, first_fetch_str) - return_results(results) - if should_push_events: - send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT) - - elif command == "qualys-get-host-detections": - should_push_events = argToBoolean(args.get('should_push_events', False)) - events, results = get_host_list_detections_events_command(client, args, first_fetch_str) - return_results(results) - if should_push_events: - send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT) - - elif command == 'fetch-events': - last_run = demisto.getLastRun() - host_list_detection_events = [] - host_next_run = {} - if should_run_host_detections_fetch(last_run=last_run, - host_detections_fetch_interval=host_detections_fetch_interval, - datetime_now=datetime.now()): - host_next_run, host_list_detection_events = fetch_events( - client=client, - last_run=last_run, - newest_event_field=HOST_DETECTIONS_NEWEST_EVENT_DATETIME, - next_page_field=HOST_DETECTIONS_NEXT_PAGE, - previous_run_time_field=HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN, - fetch_function=get_host_list_detections_events, - first_fetch_time=first_fetch_str, - max_fetch=max_fetch_hosts, - ) - logs_next_run, activity_logs_events = fetch_events( - client=client, - last_run=last_run, - newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, - next_page_field=ACTIVITY_LOGS_NEXT_PAGE, - previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, - fetch_function=get_activity_logs_events, - first_fetch_time=first_fetch_str, - max_fetch=max_fetch_activity_logs, - ) - send_events_to_xsiam(activity_logs_events + host_list_detection_events, vendor=VENDOR, product=PRODUCT) - - # saves next_run for the time fetch-events is invoked - last_run.update(logs_next_run) - last_run.update(host_next_run) - demisto.setLastRun(last_run) - - # Log exceptions and return errors - except Exception as e: - return_error(f'Failed to execute {command} command.\nError:\n{str(e)}') - - -if __name__ in ("__main__", "__builtin__", "builtins"): - main() diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.yml b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.yml deleted file mode 100644 index cae1dfe6e50d..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector.yml +++ /dev/null @@ -1,115 +0,0 @@ -category: Analytics & SIEM -sectionOrder: -- Connect -- Collect -commonfields: - id: QualysEventCollector - version: -1 -configuration: -- defaultvalue: https://qualysguard.qg2.apps.qualys.com - display: Server URL - name: url - required: true - type: 0 - section: Connect -- display: Username - name: credentials - required: true - type: 9 - section: Connect -- display: Trust any certificate (not secure) - name: insecure - required: false - type: 8 - section: Connect -- display: Use system proxy settings - name: proxy - required: false - type: 8 - section: Connect -- defaultvalue: 3 days - section: Collect - display: First fetch time - name: first_fetch - additionalinfo: If "First Fetch Time" is set for a long time ago, it may cause performance issues. - required: true - type: 0 -- display: Vulnerability Fetch Interval - additionalinfo: Time between fetches of vulnerabilities (for example 12 hours, 60 minutes, etc.). 
- name: host_detections_fetch_interval - required: true - section: Collect - defaultvalue: 12 hours - type: 0 -- section: Collect - advanced: true - display: Activity Logs Fetch Interval - additionalinfo: Time between fetches of activity logs. - name: eventFetchInterval - defaultvalue: "1" - type: 19 - required: false -- defaultvalue: 10000 - section: Collect - display: Activity Logs Fetch Limit - name: max_fetch_activity_logs - additionalinfo: Maximum number of activity logs to fetch per fetch iteration. - required: true - type: 0 -- defaultvalue: 1000 - section: Collect - display: Host Detections Fetch Limit - name: max_fetch_hosts_detections - additionalinfo: Maximum number of hosts to return in a single fetch iteration of host detections. Since each host may have multiple detections, it is likely that more events than the specified number will be fetched. - required: true - type: 0 -description: Qualys Event Collector fetches Activity Logs (Audit Logs) and Host Vulnerabilities. -display: Qualys Event Collector -name: QualysEventCollector -script: - commands: - - arguments: - - auto: PREDEFINED - defaultValue: 'false' - description: If true, the command will create events, otherwise it will only display them. - name: should_push_events - predefined: - - 'true' - - 'false' - required: true - - description: Maximum number of results to return. - name: limit - - description: Date to return results from. - name: since_datetime - - description: Offset which events to return. - name: offset - description: Gets activity logs from Qualys. - name: qualys-get-activity-logs - - arguments: - - auto: PREDEFINED - defaultValue: 'false' - description: If true, the command will create events, otherwise it will only display them. - name: should_push_events - predefined: - - 'true' - - 'false' - required: true - - description: Maximum number of results to return. - name: limit - - description: Offset which events to return. - name: offset - - description: Date to return results from. - name: vm_scan_date_after - description: Gets host detections from Qualys. - name: qualys-get-host-detections - dockerimage: demisto/python3:3.10.13.84405 - isfetchevents: true - runonce: false - script: '' - subtype: python3 - type: python -marketplaces: -- marketplacev2 -fromversion: 8.2.0 -tests: -- No tests (auto formatted) diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_description.md b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_description.md deleted file mode 100644 index 772eab628d0c..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_description.md +++ /dev/null @@ -1,20 +0,0 @@ -## Qualys Vulnerability Management Help - -- You need a Qualys user account to use the Qualys integration. If a subscription has multiple users, all users with any user role (except Contact) can use the Qualys integration. Each user’s permissions correspond to their assigned user role. - -- Qualys Vulnerability Management uses basic authentication. You'll need your Qualys login credentials in order to use this integration. - -- You can get your server URL by identifying your platform in this link: https://www.qualys.com/platform-identification/ - -- Qualys user accounts that have been enabled with VIP two-factor authentication can be used with the Qualys API, however two-factor authentication will not be used when making API requests. Two-factor authentication is only supported when logging into the Qualys GUI. 
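For reference, the authentication details above translate to one authenticated GET per API call. A minimal standalone sketch, assuming basic auth and the default platform URL; the URL and credentials shown are placeholders, not values from the pack:

```python
# Minimal sketch of an authenticated Qualys activity-log request,
# assuming basic auth and a platform URL from the link above.
import requests

server_url = "https://qualysguard.qg2.apps.qualys.com"  # placeholder; use your platform URL
response = requests.get(
    f"{server_url}/api/2.0/fo/activity_log/",
    params={"action": "list", "since_datetime": "2023-03-01T00:00:00Z", "truncation_limit": 10},
    auth=("username", "password"),           # placeholder Qualys credentials
    headers={"X-Requested-With": "Cortex"},  # header the integration sends on every call
    timeout=60,
)
response.raise_for_status()
print(response.text[:300])  # CSV body framed by ----BEGIN/END_RESPONSE_BODY_CSV markers
```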
- -### Fetch Information - -- There are two event types that are fetched for the Event Collector: - * Activity logs. - * Hosts Vulnerability. -You can adjust the fetch interval using the *Activity Logs Fetch Interval* and *Vulnerability Fetch Interval* arguments. - -- **Note**: We recommend setting "First Fetch Time" to fetch logs from no more than the last 3 days for each fetch. Using a greater fetch time, may cause performance issues. - -- Vulnerabilities in the dataset have event_type = "host_list_detections". diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_image.png b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_image.png deleted file mode 100644 index 5aba8a836b37..000000000000 Binary files a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_image.png and /dev/null differ diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py b/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py deleted file mode 100644 index 833bb99912ef..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_test.py +++ /dev/null @@ -1,219 +0,0 @@ -import freezegun -import pytest -from CommonServerPython import * # noqa: F401 - -from QualysEventCollector import get_activity_logs_events_command, get_host_list_detections_events_command, \ - Client, fetch_events, get_host_list_detections_events, get_activity_logs_events, should_run_host_detections_fetch - -ACTIVITY_LOGS_NEWEST_EVENT_DATETIME = 'activity_logs_newest_event_datetime' -ACTIVITY_LOGS_NEXT_PAGE = 'activity_logs_next_page' -ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN = 'activity_logs_since_datetime_prev_run' -HOST_DETECTIONS_NEWEST_EVENT_DATETIME = 'host_detections_newest_event_datetime' -HOST_DETECTIONS_NEXT_PAGE = 'host_detections_next_page' -HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN = 'host_detections_since_datetime_prev_run' -HOST_LAST_FETCH = 'host_last_fetch' -BEGIN_RESPONSE_LOGS_CSV = "----BEGIN_RESPONSE_BODY_CSV" -END_RESPONSE_LOGS_CSV = "----END_RESPONSE_BODY_CSV" -FOOTER = """----BEGIN_RESPONSE_FOOTER_CSV -WARNING -"CODE","TEXT","URL" -"1980","17 record limit exceeded. Use URL to get next batch of results.","https://server_url/api/2.0/fo/activity_log/ -?action=list&since_datetime=2022-12-21T03:42:05Z&truncation_limit=10&id_max=123456" -----END_RESPONSE_FOOTER_CSV""" - - -def test_get_activity_logs_events_command(requests_mock): - """ - Given: - - activity_logs_events_command - - When: - - Want to list all existing activity logs - - Then: - - Ensure Activity Logs Results in human-readable, and number of results reasonable. 
- """ - base_url = 'https://server_url/' - with open('test_data/activity_logs.csv') as f: - logs = f.read() - requests_mock.get(f'{base_url}api/2.0/fo/activity_log/' - f'?action=list&truncation_limit=0&since_datetime=2023-03-01T00%3A00%3A00Z', text=logs) - client = Client(base_url=base_url, - verify=True, - headers={}, - proxy=False, - username='demisto', - password='demisto', - ) - args = {'limit': 50, 'since_datetime': '1 March 2023'} - first_fetch = '2022-03-21T03:42:05Z' - activity_logs_events, results = get_activity_logs_events_command(client, args, first_fetch) - assert 'Activity Logs' in results.readable_output - assert len(activity_logs_events) == 17 - - -def test_get_host_list_detections_events_command(requests_mock): - """ - Given: - - host_list_detections_events_command - - When: - - Want to list all existing incidents - Then: - - Ensure List Host Detections Results in human-readable, and number of results reasonable. - """ - base_url = 'https://server_url/' - with open('./test_data/host_list_detections_raw.xml') as f: - logs = f.read() - requests_mock.get(f'{base_url}api/2.0/fo/asset/host/vm/detection/' - f'?action=list&truncation_limit=0&vm_scan_date_after=2023-03-01T00%3A00%3A00Z', text=logs) - client = Client(base_url=base_url, - verify=True, - headers={}, - proxy=False, - username='demisto', - password='demisto', - ) - args = {'limit': 50, 'vm_scan_date_after': '1 March 2023'} - first_fetch = '2022-03-21T03:42:05Z' - host_events, results = get_host_list_detections_events_command(client, args, first_fetch) - assert 'Host List Detection' in results.readable_output - assert len(host_events) == 8 - - -@pytest.mark.parametrize('last_run, fetch_interval_param, expected_should_run', [ - ('2023-05-24T11:55:35Z', '2023-05-24 00:00:00', False), - ('2023-05-23T11:55:35Z', '2023-05-24 00:00:00', True), - ({}, '2023-05-24 11:00:00', True), -]) -def test_should_run_host_detections_fetch(last_run, fetch_interval_param, expected_should_run): - """ - Given: - - should_run_host_detections_fetch command (fetches detections) - - When: - - Running fetch-events command and need to decide whether to fetch host detections - - Then: - - Ensure the expected result - """ - datetime_now = datetime.strptime('2023-05-24 12:00:00', '%Y-%m-%d %H:%M:%S') - delta = datetime.strptime(fetch_interval_param, '%Y-%m-%d %H:%M:%S') - fetch_interval = datetime_now - delta - last_run_dict = {'host_last_fetch': last_run} - should_run = should_run_host_detections_fetch(last_run=last_run_dict, - host_detections_fetch_interval=fetch_interval, - datetime_now=datetime_now) - assert should_run == expected_should_run - - -@pytest.mark.parametrize('activity_log_last_run, logs_number, add_footer', - [(None, 17, True), - ("2023-05-24T09:55:35Z", 0, True), - ("2023-05-14T15:04:55Z", 7, True), - ("2023-01-01T08:06:44Z", 17, False)]) -def test_fetch_logs_events_command(requests_mock, activity_log_last_run, logs_number, add_footer): - """ - Given: - - fetch events command (fetches logs) - - When: - - Running fetch-events command - - Then: - - Ensure number of events fetched - - Ensure next page token saved - - Ensure previous run saved - - Ensure newest event time saved - """ - first_fetch_str = '2022-12-21T03:42:05Z' - base_url = 'https://server_url/' - truncation_limit = logs_number - with open('test_data/activity_logs.csv') as f: - logs = f.read() - new_logs = f'{BEGIN_RESPONSE_LOGS_CSV}' - for row in logs.split('\n'): - if activity_log_last_run and activity_log_last_run in row: - new_logs += f'{row}\n' - break - new_logs += 
f'{row}\n' - new_logs += f'{END_RESPONSE_LOGS_CSV}\n' - if add_footer: - new_logs += f'{FOOTER}\n' - - requests_mock.get(f'{base_url}api/2.0/fo/activity_log/' - f'?action=list&truncation_limit={truncation_limit}&' - f'since_datetime={activity_log_last_run if activity_log_last_run else first_fetch_str}', - text=new_logs) - client = Client(base_url=base_url, - verify=True, - headers={}, - proxy=False, - username='demisto', - password='demisto', - ) - last_run = {ACTIVITY_LOGS_NEWEST_EVENT_DATETIME: activity_log_last_run} - - logs_next_run, activity_logs_events = fetch_events( - client=client, - last_run=last_run, - newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, - next_page_field=ACTIVITY_LOGS_NEXT_PAGE, - previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, - fetch_function=get_activity_logs_events, - first_fetch_time=first_fetch_str, - max_fetch=truncation_limit, - ) - assert len(activity_logs_events) == logs_number - assert logs_next_run.get(ACTIVITY_LOGS_NEXT_PAGE) == ("123456" if add_footer else None) - assert logs_next_run.get(ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN) == activity_log_last_run or first_fetch_str - assert logs_next_run.get(ACTIVITY_LOGS_NEWEST_EVENT_DATETIME) == "2023-05-24T09:55:35Z" - - -@freezegun.freeze_time('2023-05-16 16:00:00') -@pytest.mark.parametrize('host_last_run,detections_number', - [(None, 8), - ("2023-05-16T15:26:53Z", 4), - ("2023-05-14T15:04:55Z", 7)]) -def test_fetch_detection_events_command(requests_mock, host_last_run, detections_number): - """ - Given: - - fetch events command (fetches detections) - - When: - - Running fetch-events command - - Then: - - Ensure number of events fetched - """ - first_fetch_str = '2022-12-21T03:42:05Z' - base_url = 'https://server_url/' - truncation_limit = 10 - with open('./test_data/host_list_detections_raw.xml') as f: - hosts = f.read() - requests_mock.get(f'{base_url}api/2.0/fo/asset/host/vm/detection/' - f'?action=list&truncation_limit={truncation_limit}' - f'&vm_scan_date_after={host_last_run if host_last_run else first_fetch_str}', text=hosts) - client = Client( - base_url=base_url, - verify=True, - headers={}, - proxy=False, - username='demisto', - password='demisto', - ) - last_run = {HOST_DETECTIONS_NEWEST_EVENT_DATETIME: host_last_run} - host_next_run, host_list_detection_events = fetch_events( - client=client, - last_run=last_run, - newest_event_field=HOST_DETECTIONS_NEWEST_EVENT_DATETIME, - next_page_field=HOST_DETECTIONS_NEXT_PAGE, - previous_run_time_field=HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN, - fetch_function=get_host_list_detections_events, - first_fetch_time=first_fetch_str, - max_fetch=truncation_limit, - ) - - assert len(host_list_detection_events) == detections_number - assert host_next_run.get(HOST_DETECTIONS_NEWEST_EVENT_DATETIME) == '2023-05-16T15:26:01Z' - assert host_next_run.get(HOST_LAST_FETCH) == '2023-05-16T16:00:00Z' diff --git a/Packs/qualys/Integrations/QualysEventCollector/README.md b/Packs/qualys/Integrations/QualysEventCollector/README.md deleted file mode 100644 index 2281ab4cb971..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/README.md +++ /dev/null @@ -1,71 +0,0 @@ -Qualys Event Collector fetches Activity Logs (Audit Logs) and Host Vulnerabilities. -This integration was integrated and tested with version 3.15.2.0-1 of Qualys. - -## Configure Qualys Event Collector on Cortex XSOAR - -1. Navigate to **Settings** > **Configurations** > **Data Collection** > **Automation & Feed Integrations**. -2. Search for Qualys Event Collector. -3. 
Click **Add instance** to create and configure a new integration instance. - - | **Parameter** | **Description** | **Required** | - | --- | --- | --- | - | Server URL | | True | - | Username | | True | - | Password | | True | - | Trust any certificate (not secure) | | False | - | Use system proxy settings | | False | - | First fetch time | If "First Fetch Time" is set for a long time ago, it may cause performance issues. | True | - | Vulnerability Fetch Interval | Time between fetches of vulnerabilities \(for example 12 hours, 60 minutes, etc.\). | True | - | Activity Logs Fetch Interval | Time between fetches of activity logs. | False | - | Activity Logs Fetch Limit | Maximum number of activity logs to fetch per fetch iteration. | True | - | Host Detections Fetch Limit | Maximum number of hosts to return in a single fetch iteration. | True | - -4. Click **Test** to validate the URLs, token, and connection. - -## Commands - -You can execute these commands from the Cortex XSIAM CLI, as part of an automation, or in a playbook. -After you successfully execute a command, a DBot message appears in the War Room with the command details. - -### qualys-get-activity-logs - -*** -Gets activity logs from Qualys. - -#### Base Command - -`qualys-get-activity-logs` - -#### Input - -| **Argument Name** | **Description** | **Required** | -| --- | --- | --- | -| should_push_events | If true, the command will create events, otherwise it will only display them. Possible values are: true, false. Default is false. | Required | -| limit | Maximum results to return. | Optional | -| since_datetime | Date to return results from. | Optional | -| offset | Offset which events to return. | Optional | - -#### Context Output - -There is no context output for this command. -### qualys-get-host-detections - -*** -Gets host detections from Qualys. - -#### Base Command - -`qualys-get-host-detections` - -#### Input - -| **Argument Name** | **Description** | **Required** | -| --- | --- | --- | -| should_push_events | If true, the command will create events, otherwise it will only display them. Possible values are: true, false. Default is false. | Required | -| limit | Maximum number of results to return. | Optional | -| offset | Offset which events to return. | Optional | -| vm_scan_date_after | Date to return results from. | Optional | - -#### Context Output - -There is no context output for this command. 
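The offset/limit paging in these commands is separate from the server-side pagination: for activity logs, Qualys signals the next page through a CSV footer whose URL carries `id_max`. A self-contained sketch of that footer parsing, mirroring `get_partial_response` and `get_next_page_from_url` from the pack; the sample response text is fabricated for illustration:

```python
# Extract the footer section between its BEGIN/END markers, read it as CSV,
# and pull the id_max token out of the continuation URL.
import csv
import io
import re

SAMPLE = """----BEGIN_RESPONSE_FOOTER_CSV
WARNING
"CODE","TEXT","URL"
"1980","17 record limit exceeded.","https://server_url/api/2.0/fo/activity_log/?action=list&id_max=123456"
----END_RESPONSE_FOOTER_CSV"""

def partial(response: str, start: str, end: str) -> str:
    # Cut the text between the start and end tokens and drop the WARNING prefix.
    body = response[response.index(start) + len(start):response.index(end)].strip()
    return body.removeprefix("WARNING").strip()

footer_csv = partial(SAMPLE, "----BEGIN_RESPONSE_FOOTER_CSV", "----END_RESPONSE_FOOTER_CSV")
footer = next(csv.DictReader(io.StringIO(footer_csv)))
match = re.search(r"id_max=(\d+)", footer.get("URL", ""))
print(match.group(1) if match else None)  # -> 123456, the seed for the next request
```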
diff --git a/Packs/qualys/Integrations/QualysEventCollector/command_examples b/Packs/qualys/Integrations/QualysEventCollector/command_examples deleted file mode 100644 index ff11963bcedb..000000000000 --- a/Packs/qualys/Integrations/QualysEventCollector/command_examples +++ /dev/null @@ -1,2 +0,0 @@ -!qualys-get-host-detections limit=2 -!qualys-get-activity-logs limit=2 \ No newline at end of file diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py index b3e6c577efcb..7e2ddbce415a 100644 --- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.py +++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.py @@ -1,8 +1,10 @@ +import copy import demistomock as demisto # noqa: F401 from CommonServerPython import * # noqa: F401 from collections.abc import Callable - - +from typing import Any +import csv +import io import requests from urllib3 import disable_warnings @@ -11,6 +13,25 @@ disable_warnings() # pylint: disable=no-member """ CONSTANTS """ +VENDOR = 'qualys' +PRODUCT = 'qualys' +BEGIN_RESPONSE_LOGS_CSV = "----BEGIN_RESPONSE_BODY_CSV" +END_RESPONSE_LOGS_CSV = "----END_RESPONSE_BODY_CSV" +BEGIN_RESPONSE_FOOTER_CSV = "----BEGIN_RESPONSE_FOOTER_CSV" +END_RESPONSE_FOOTER_CSV = "----END_RESPONSE_FOOTER_CSV" +WARNING = 'WARNING' +ACTIVITY_LOGS_NEWEST_EVENT_DATETIME = 'activity_logs_newest_event_datetime' +ACTIVITY_LOGS_NEXT_PAGE = 'activity_logs_next_page' +ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN = 'activity_logs_since_datetime_prev_run' +HOST_DETECTIONS_NEWEST_EVENT_DATETIME = 'host_detections_newest_event_datetime' +HOST_DETECTIONS_NEXT_PAGE = 'host_detections_next_page' +HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN = 'host_detections_since_datetime_prev_run' +HOST_LAST_FETCH = 'host_last_fetch' +ASSETS_FETCH_FROM = '90 days' +MIN_ASSETS_INTERVAL = 59 +HOST_LIMIT = 1000 + +ASSETS_DATE_FORMAT = '%Y-%m-%d' DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ" # ISO8601 format with UTC, default in XSOAR API_SUFFIX = "/api/2.0/fo/" @@ -1610,6 +1631,85 @@ def command_http_request(self, command_api_data: dict[str, str]) -> Union[str, b error_handler=self.error_handler, ) + def get_user_activity_logs(self, since_datetime: str, max_fetch: int = 0, next_page=None) -> Union[str, bytes]: + """ + Make a http request to Qualys API to get user activities logs + Args: + Returns: + response from Qualys API + Raises: + DemistoException: can be raised by the _http_request function + """ + self._headers.update({"Content-Type": 'application/json'}) + params: dict[str, Any] = { + "truncation_limit": max_fetch + } + if since_datetime: + params["since_datetime"] = since_datetime + if next_page: + params["id_max"] = next_page + + response = self._http_request( + method='GET', + url_suffix=urljoin(API_SUFFIX, 'activity_log/?action=list'), + resp_type='text/csv', + params=params, + timeout=60, + error_handler=self.error_handler, + ) + + return response.text + + def get_host_list_detection(self, since_datetime, next_page=None) -> Union[str, bytes]: + """ + Make a http request to Qualys API to get assets + Args: + Returns: + response from Qualys API + Raises: + DemistoException: can be raised by the _http_request function + """ + self._headers.update({"Content-Type": 'application/json'}) + params: dict[str, Any] = { + "truncation_limit": HOST_LIMIT, + "vm_scan_date_after": since_datetime + } + if next_page: + params["id_min"] = next_page + + response = self._http_request( + method='GET', + url_suffix=urljoin(API_SUFFIX, 'asset/host/vm/detection/?action=list'), + resp_type='text', + params=params, + 
timeout=60,
+            error_handler=self.error_handler,
+        )
+        return response
+
+    def get_vulnerabilities(self, since_datetime) -> Union[str, bytes]:
+        """
+        Makes an HTTP request to the Qualys API to get vulnerabilities
+        Args:
+            since_datetime: only vulnerabilities modified after this date are returned
+        Returns:
+            response from Qualys API
+        Raises:
+            DemistoException: can be raised by the _http_request function
+        """
+        self._headers.update({"Content-Type": 'application/json'})
+        params: dict[str, Any] = {"last_modified_after": since_datetime}
+
+        response = self._http_request(
+            method='POST',
+            url_suffix=urljoin(API_SUFFIX, 'knowledge_base/vuln/?action=list'),
+            resp_type='text',
+            params=params,
+            timeout=60,
+            error_handler=self.error_handler,
+        )
+
+        return response
+

 """ HELPER FUNCTIONS """

@@ -2574,23 +2674,399 @@ def build_tag_asset_output(**kwargs) -> tuple[List[Any], str]:
     return handled_result, readable_output

-""" COMMAND FUNCTIONS """
+def get_partial_response(response: str, start: str, end: str):
+    """ Cuts the response string between the start and end tokens.
+    """
+    if start not in response or end not in response:
+        return None
+    start_index = response.index(start) + len(start)
+    end_index = response.index(end)
+    result = response[start_index:end_index].strip()
+    if result.startswith(WARNING):
+        result = result.replace(WARNING, '').strip()
+    return result

-@logger
-def test_module(client: Client) -> str:
+def skip_fetch_assets(last_run):
+    """ Checks whether enough time has passed since the previous run.
+    Args:
+        last_run: Last run time.
+    Returns:
+        True if less than the minimum interval has passed since the previous run (the fetch should be skipped), else False.
+    """
+    time_to_check = last_run.get("assets_last_fetch")
+    if not time_to_check:
+        return False
+    passed_minutes = (time.time() - time_to_check) / 60
+    if passed_minutes < MIN_ASSETS_INTERVAL:
+        demisto.info(f"Skipping fetch-assets command. Only {passed_minutes} minutes have passed since the last fetch. "
+                     f"It should be a minimum of 1 hour.")
+        return True
+    return False
+
+
+def csv2json(csv_data: str):
+    """ Converts data from CSV to JSON
+    Args:
+        csv_data: data in CSV format
+    Returns:
+        the same data in JSON format
+    """
+    reader = csv.DictReader(io.StringIO(csv_data))
+    json_data = list(reader)
+    return json_data
+
+
+def get_next_page_from_url(url, field):
+    """
+    Gets the next page field from the URL.
+    """
+    match = re.search(rf"{field}=(\d+)", url)
+    res = match.group(1) if match else None
+    return res
+
+
+def get_next_page_activity_logs(footer):
+    """
+    Extracts the next page token from the activity logs response footer.
""" - Makes a http request to qualys API in order to test the connection + if isinstance(footer, list): + footer = footer[0] + next_url = footer.get('URL', '') + max_id = get_next_page_from_url(next_url, 'id_max') + return max_id + + +def handle_host_list_detection_result(raw_response: requests.Response) -> tuple[list, Optional[str]]: + """ + Handles Host list detection response - parses xml to json and gets the list Args: - client: Client object for making a http request + raw_response (requests.Response): the raw result received from Qualys API command Returns: - 'ok' message if the connection test was successful - Raises: - DemistoException: will be raised when connection was not successful by command_http_request + List with data generated for the result given + """ + formatted_response = parse_raw_response(raw_response) + simple_response = get_simple_response_from_raw(formatted_response) + if simple_response and simple_response.get("CODE"): + raise DemistoException(f"\n{simple_response.get('TEXT')} \nCode: {simple_response.get('CODE')}") + + response_requested_value = dict_safe_get(formatted_response, + ["HOST_LIST_VM_DETECTION_OUTPUT", "RESPONSE", "HOST_LIST", "HOST"]) + response_next_url = dict_safe_get(formatted_response, + ["HOST_LIST_VM_DETECTION_OUTPUT", "RESPONSE", "WARNING", "URL"], default_return_value='') + if isinstance(response_requested_value, dict): + response_requested_value = [response_requested_value] + + return response_requested_value, str(response_next_url) + + +def handle_vulnerabilities_result(raw_response: requests.Response) -> list: + """ + Handles vulnerabilities response - parses xml to json and gets the list + Args: + raw_response (requests.Response): the raw result received from Qualys API command + Returns: + List with data generated for the result given + """ + formatted_response = parse_raw_response(raw_response) + + vulnerabilities = dict_safe_get(formatted_response, ['KNOWLEDGE_BASE_VULN_LIST_OUTPUT', 'RESPONSE', 'VULN_LIST', 'VULN']) + if isinstance(vulnerabilities, dict): + vulnerabilities = [vulnerabilities] + + return vulnerabilities + + +def remove_last_events(events, time_to_remove, time_field): + """ Removes events with certain time. + Args: + events: list of events to remove the time from + time_to_remove: remove events with this time + time_field: the field name where the time is + """ + new_events = [] + for event in events: + if event.get(time_field) == time_to_remove: + demisto.debug(f'Removed activity log event with time: {time_to_remove}, log: {event}') + else: + new_events.append(event) + return new_events + + +def add_fields_to_events(events, time_field_path, event_type_field): + """ + Adds the _time key to the events. + Args: + events: List[Dict] - list of events to add the _time key to. + time_field_path: the list of fields to get _time from + event_type_field: type field in order to distinguish between the API's + Returns: + list: The events with the _time key. + """ + if events: + for event in events: + event['_time'] = dict_safe_get(event, time_field_path) + event['event_type'] = event_type_field + + +def get_detections_from_hosts(hosts): + """ + Parses detections from hosts. + Each host contains list of detections: + {'ID':1, + 'IP': '1.1.1.1', + 'LAST_VM_SCANNED_DATE': '01-01-2020', + 'DETECTION_LIST': {'DETECTION': [first_detection_data, second_detection, ...]} + 'additional_fields': ... 
+    }
+
+    The function parses the data in the following way:
+    {'ID': 1,
+     'IP': '1.1.1.1',
+     'LAST_VM_SCANNED_DATE': '01-01-2020',
+     'DETECTION': first_detection_data
+     'additional_fields': ...
+    },
+    {'ID': 1,
+     'IP': '1.1.1.1',
+     'LAST_VM_SCANNED_DATE': '01-01-2020',
+     'DETECTION': second_detection_data
+     'additional_fields': ...
+    }
+    ....
+
+    :param hosts: list of hosts that contain detections.
+    :return: parsed events.
+    """
+    fetched_events = []
+    for host in hosts:
+        if detections_list := host.get('DETECTION_LIST', {}).get('DETECTION'):
+            if isinstance(detections_list, list):
+                for detection in detections_list:
+                    new_detection = copy.deepcopy(host)
+                    del new_detection['DETECTION_LIST']
+                    new_detection['DETECTION'] = detection
+                    fetched_events.append(new_detection)
+            elif isinstance(detections_list, dict):
+                new_detection = copy.deepcopy(host)
+                new_detection['DETECTION'] = detections_list
+                del new_detection['DETECTION_LIST']
+                fetched_events.append(new_detection)
+        else:
+            del host['DETECTION_LIST']
+            host['DETECTION'] = {}
+            fetched_events.append(host)
+    return fetched_events
+
+
+def get_activity_logs_events(client, since_datetime, max_fetch, next_page=None) -> tuple[Optional[list], dict]:
+    """ Gets activity logs from Qualys.
+    The API returns events sorted in descending order. We save the next_page param and
+    send the next request with the next_page arg if needed, saving the newest event fetched.
+    We delete the newest event each time to avoid duplication.
+    Args:
+        client: Qualys client
+        since_datetime: datetime to get events from
+        max_fetch: max number of events to return
+        next_page: pagination marker
+    Returns:
+        Activity log events and the next-run dict
+    """
+    demisto.debug(f'Starting to fetch activity logs events: since_datetime={since_datetime}, next_page={next_page}')
+    activity_logs = client.get_user_activity_logs(since_datetime=since_datetime, max_fetch=max_fetch, next_page=next_page)
+    activity_logs_events = csv2json(get_partial_response(activity_logs, BEGIN_RESPONSE_LOGS_CSV,
+                                                         END_RESPONSE_LOGS_CSV) or activity_logs) or []
+    footer_json = csv2json(get_partial_response(activity_logs, BEGIN_RESPONSE_FOOTER_CSV,
+                                                END_RESPONSE_FOOTER_CSV)) or {}
+    new_next_page = get_next_page_activity_logs(footer_json)
+    demisto.debug(f'Got activity logs events from server: {len(activity_logs_events)=}.')
+
+    newest_event_time = activity_logs_events[0].get('Date') if activity_logs_events else since_datetime
+
+    if not next_page:
+        activity_logs_events = remove_last_events(activity_logs_events, newest_event_time, 'Date')
+    add_fields_to_events(activity_logs_events, ['Date'], 'activity_log')
+
+    next_run_dict = {
+        ACTIVITY_LOGS_NEWEST_EVENT_DATETIME: newest_event_time,
+        ACTIVITY_LOGS_NEXT_PAGE: new_next_page,
+        ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN: since_datetime,
+    }
+    demisto.debug(f'Finished fetching activity logs events: {next_run_dict=}, sending {len(activity_logs_events)} events.')
+    return activity_logs_events, next_run_dict
+
+
+def get_host_list_detections_events(client, since_datetime) -> list:
+    """ Gets host list detections from Qualys.
+    Args:
+        client: Qualys client
+        since_datetime: The start fetch date.
+    Returns:
+        Host list detection assets
+    """
+    demisto.debug('Starting to fetch assets')
+    assets = []  # type: ignore[var-annotated]
+    next_page = ''
+
+    while True:
+        host_list_detections = client.get_host_list_detection(since_datetime, next_page=next_page)
+        host_list_assets, next_url = handle_host_list_detection_result(host_list_detections) or []
+        assets += host_list_assets
+        next_page = get_next_page_from_url(next_url, 'id_min')
+        if not next_page:
+            break
+
+    edited_host_detections = get_detections_from_hosts(assets)
+    demisto.debug(f'Parsed detections from hosts, got {len(edited_host_detections)=} assets.')
+
+    add_fields_to_events(edited_host_detections, ['DETECTION', 'FIRST_FOUND_DATETIME'], 'host_list_detection')
+
+    return edited_host_detections
+
+
+def get_vulnerabilities(client, since_datetime) -> list:
+    """ Gets the vulnerabilities list from Qualys.
+    Args:
+        client: Qualys client
+        since_datetime: The start fetch date.
+    Returns:
+        List of vulnerabilities
+    """
+    demisto.debug('Starting to fetch vulnerabilities')
+    host_list_detections = client.get_vulnerabilities(since_datetime)
+    vulnerabilities = handle_vulnerabilities_result(host_list_detections) or []
+
+    demisto.debug(f'Got {len(vulnerabilities)=} vulnerabilities.')
+    return vulnerabilities
+
+
+def fetch_assets(client, since_datetime=None):
+    """ Fetches host list detections and vulnerabilities.
+    Args:
+        client: command client
+        since_datetime: The start fetch date.
+    Return:
+        assets: assets to push to xsiam
+        vulnerabilities: vulnerabilities to push to xsiam
+    """
+    demisto.debug('Starting fetch for assets')
+    if not since_datetime:
+        since_datetime = arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)  # type: ignore[union-attr]
+
+    assets = get_host_list_detections_events(client, since_datetime)
+    vulnerabilities = get_vulnerabilities(client, since_datetime)
+
+    demisto.info(f"Pulled {len(assets)} assets and {len(vulnerabilities)} vulnerabilities from the API, sending them to XSIAM")
+    return assets, vulnerabilities
+
+
+def fetch_events(client, last_run, first_fetch_time, fetch_function, newest_event_field, next_page_field,
+                 previous_run_time_field, max_fetch: Optional[int] = 0):
+    """ Fetches activity logs and host list detections.
+    Args:
+        client: command client
+        last_run: last fetch time
+        first_fetch_time: time to start the fetch from
+        fetch_function: function that gets the events
+        max_fetch: max number of items to return (0 to return all)
+        newest_event_field: last-run key that holds the newest fetched event time
+        next_page_field: last-run key that holds the pagination token
+        previous_run_time_field: last-run key that holds the previous run's fetch start time
+    Return:
+        next_last_run: where to fetch from next time
+        event: events to push to xsiam
+    """
+    demisto.debug(f'Starting fetch for {fetch_function.__name__}, last run: {last_run}')
+    newest_event_time = last_run.get(newest_event_field) if last_run else None
+    next_page = last_run.get(next_page_field)
+    previous_time_field = last_run.get(previous_run_time_field)
+
+    if not newest_event_time:
+        newest_event_time = first_fetch_time
+
+    time_to_fetch = newest_event_time if not next_page else previous_time_field
+
+    events, new_next_run = fetch_function(client, time_to_fetch, max_fetch, next_page)
+
+    updated_next_run = {previous_run_time_field: time_to_fetch}
+    new_next_page = new_next_run.get(next_page_field)
+
+    # if the fetch is not during the pagination (fetched without next_page)
+    if not next_page:
+        # update the newest event
+        updated_next_run[newest_event_field] = new_next_run.get(newest_event_field)
+
+    # update if there is a next page and this fetch is not over
+    updated_next_run[next_page_field] = new_next_page
+
+    if last_fetch_time := new_next_run.get(HOST_LAST_FETCH):
+        updated_next_run[HOST_LAST_FETCH] = last_fetch_time
+
+    demisto.info(f"Sending {len(events)} events to XSIAM. updated_next_run={updated_next_run}.")
+    return updated_next_run, events
+
+
+def get_activity_logs_events_command(client, args, first_fetch_time):
+    """
+    Args:
+        client: command client
+        args: Demisto args for this command: limit and since_datetime
+        first_fetch_time: first fetch time
+    Returns:
+        Command results with activity logs
+
+    """
+    limit = arg_to_number(args.get('limit', 50))
+    offset = arg_to_number(args.get('offset', 0))
+    since_datetime = arg_to_datetime(args.get('since_datetime'))
+    since_datetime = since_datetime.strftime(DATE_FORMAT) if since_datetime else first_fetch_time
+    activity_logs_events, _ = get_activity_logs_events(
+        client=client,
+        since_datetime=since_datetime,
+        max_fetch=0,
+    )
+    limited_activity_logs_events = activity_logs_events[offset:limit + offset]  # type: ignore[index,operator]
+    activity_logs_hr = tableToMarkdown(name='Activity Logs', t=limited_activity_logs_events)
+    results = CommandResults(
+        readable_output=activity_logs_hr,
+        raw_response=limited_activity_logs_events,
+    )
+
+    return limited_activity_logs_events, results
+
+
+def test_module(client: Client, params: dict[str, Any], first_fetch_time: str) -> str:
+    """
+    Tests API connectivity and authentication.
+    When 'ok' is returned it indicates the integration works like it is supposed to and the connection to the service is
+    successful.
+    Raises exceptions if something goes wrong.
+    Args:
+        client (Client): Qualys client to use.
+        params (Dict): Integration parameters.
+        first_fetch_time (str): The first fetch time as configured in the integration params.
+    Returns:
+        str: 'ok' if test passed, anything else will raise an exception and will fail the test.
""" build_args_dict(None, COMMANDS_ARGS_DATA["test-module"], False) client.command_http_request(COMMANDS_API_DATA["test-module"]) - return "ok" + + if params.get('isFetchEvents'): + fetch_events( + client=client, + last_run={}, + first_fetch_time=first_fetch_time, + max_fetch=1, + fetch_function=get_activity_logs_events, + newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, + next_page_field=ACTIVITY_LOGS_NEXT_PAGE, + previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, + ) + if params.get('isFetchAssets'): + since_datetime = arg_to_datetime('3 days').strftime(ASSETS_DATE_FORMAT) # type: ignore[union-attr] + fetch_assets(client=client, since_datetime=since_datetime) + + return 'ok' @logger @@ -2659,8 +3135,10 @@ def qualys_command_flow_manager( def main(): # pragma: no cover params = demisto.params() + args = demisto.args() + command = demisto.command() - base_url = params["url"] + base_url = params.get('url') verify_certificate = not params.get("insecure", False) proxy = params.get("proxy", False) username = params.get("credentials").get("identifier") @@ -2907,27 +3385,76 @@ def main(): # pragma: no cover }, } - requested_command = demisto.command() - - demisto.debug(f"Command being called is {requested_command}") + demisto.debug(f"Command being called is {command}") try: - headers: dict = {"X-Requested-With": "Demisto"} - + headers: dict = {"X-Requested-With": "Cortex"} client = Client( base_url=base_url, username=username, password=password, verify=verify_certificate, headers=headers, proxy=proxy ) - if requested_command == "test-module": - text_res = test_module(client) + first_fetch_datetime: datetime = arg_to_datetime( # type: ignore[assignment] + arg=params.get('first_fetch', '3 days'), + arg_name='First fetch time', + required=True + ) + first_fetch_str = first_fetch_datetime.strftime(DATE_FORMAT) + + if command == "test-module": + text_res = test_module(client, params, first_fetch_str) return_results(text_res) + + elif command == "qualys-get-events": + should_push_events = argToBoolean(args.get('should_push_events', False)) + events, results = get_activity_logs_events_command(client, args, first_fetch_str) + return_results(results) + if should_push_events: + send_events_to_xsiam(events, vendor=VENDOR, product=PRODUCT) + + elif command == "qualys-get-assets": + should_push_events = argToBoolean(args.get('should_push_assets', False)) + assets, vulnerabilities = fetch_assets(client=client) + return_results(f'Pulled {len(assets)} assets, and {len(vulnerabilities)} vulnerabilities from API') + if should_push_events: + send_data_to_xsiam(data=assets, vendor=VENDOR, product='host_detections', data_type='assets') + send_data_to_xsiam(data=vulnerabilities, vendor=VENDOR, product='vulnerabilities', data_type='assets') + + elif command == 'fetch-events': + last_run = demisto.getLastRun() + max_fetch_activity_logs = arg_to_number(params.get("max_fetch_activity_logs", 0)) + logs_next_run, activity_logs_events = fetch_events( + client=client, + last_run=last_run, + newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, + next_page_field=ACTIVITY_LOGS_NEXT_PAGE, + previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, + fetch_function=get_activity_logs_events, + first_fetch_time=first_fetch_str, + max_fetch=max_fetch_activity_logs, + ) + send_events_to_xsiam(activity_logs_events, vendor=VENDOR, product=PRODUCT) + + # saves next_run for the time fetch-events is invoked + demisto.setLastRun(logs_next_run) + + elif command == 'fetch-assets': + assets_last_run = 
+            demisto.debug(f'Saved assets last run: {assets_last_run}')
+            if skip_fetch_assets(assets_last_run):
+                return
+            execution_start_time = time.time()
+            assets, vulnerabilities = fetch_assets(client=client)
+            send_data_to_xsiam(data=assets, vendor=VENDOR, product='assets', data_type='assets')
+            send_data_to_xsiam(data=vulnerabilities, vendor=VENDOR, product='vulnerabilities', data_type='assets')
+            demisto.setAssetsLastRun({'assets_last_fetch': execution_start_time})
+
         else:
             return_results(
-                qualys_command_flow_manager(client, demisto.args(), requested_command, commands_methods[requested_command])
+                qualys_command_flow_manager(client, demisto.args(), command, commands_methods[command])
             )
 
     except Exception as e:
         demisto.error(traceback.format_exc())  # print the traceback
-        return_error(f"Failed to execute {requested_command} command.\nError:\n{str(e)}")
+        return_error(f"Failed to execute {command} command.\nError:\n{str(e)}")
 
 
 if __name__ in ("__main__", "__builtin__", "builtins"):
diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml
index 06d8f56edbe3..988215ff9075 100644
--- a/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml
+++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2.yml
@@ -1,4 +1,7 @@
 category: Vulnerability Management
+sectionOrder:
+- Connect
+- Collect
 commonfields:
   id: QualysV2
   version: -1
@@ -9,20 +12,50 @@ configuration:
   required: true
   type: 0
   additionalinfo: "When using asset-tag commands, the official documentation recommends that the SERVER URL parameter should be in the following format: `https://qualysapi..apps.qualys.com/`. For more details see the integration documentation."
+  section: Connect
 - display: Username
   name: credentials
   required: true
   type: 9
+  section: Connect
 - display: Trust any certificate (not secure)
   name: insecure
   type: 8
   required: false
+  section: Connect
+  advanced: true
 - display: Use system proxy settings
   name: proxy
   type: 8
   required: false
-description: Qualys Vulnerability Management lets you create, run, fetch and manage reports, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance.
-display: Qualys v2
+  section: Connect
+  advanced: true
+- defaultvalue: 3 days
+  section: Collect
+  display: Event first fetch time
+  name: first_fetch
+  additionalinfo: If "Event first fetch time" is set to a point far in the past, it may cause performance issues.
+  required: false
+  advanced: true
+  type: 0
+- defaultvalue: 10000
+  section: Collect
+  display: Event Fetch Limit
+  name: max_fetch_activity_logs
+  additionalinfo: Maximum number of events to fetch per fetch iteration.
+  advanced: true
+  required: false
+  type: 0
+- additionalinfo: The fetch interval. The recommended interval is 24 hours; the minimum is 1 hour.
+  defaultvalue: 1440
+  display: Assets and Vulnerabilities Fetch Interval
+  name: assetsFetchInterval
+  type: 19
+  section: Collect
+  advanced: true
+  required: false
+description: Qualys Vulnerability Management lets you create, run, and manage reports; fetch Activity Logs, Assets, and Vulnerabilities; launch and manage vulnerability and compliance scans; and manage the host assets you want to scan for vulnerabilities and compliance.
+display: Qualys VMDR
 name: QualysV2
 script:
   commands:
@@ -2760,7 +2793,38 @@ script:
     - name: csv_data
       description: The CSV data file containing the vCenter - ESXi mapping records that you want to purge.
      required: true
-  dockerimage: demisto/python3:3.10.13.87159
+  - arguments:
+    - auto: PREDEFINED
+      defaultValue: 'false'
+      description: If true, the command will create events; otherwise, it will only display them.
+      name: should_push_events
+      predefined:
+      - 'true'
+      - 'false'
+      required: true
+    - description: Maximum number of results to return.
+      name: limit
+    - description: Date to return results from.
+      name: since_datetime
+    - description: The offset from which to return events.
+      name: offset
+    description: Gets activity logs from Qualys.
+    name: qualys-get-events
+  - arguments:
+    - auto: PREDEFINED
+      defaultValue: 'false'
+      description: If true, the command will create assets; otherwise, it will only display the number of available assets.
+      name: should_push_assets
+      predefined:
+      - 'true'
+      - 'false'
+      required: true
+    description: Gets assets from Qualys.
+    name: qualys-get-assets
+  dockerimage: demisto/python3:3.10.14.90585
+  isfetchevents: true
+  isfetchassets: true
+  runonce: false
   script: ''
   subtype: python3
   type: python
diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_dark.svg b/Packs/qualys/Integrations/Qualysv2/Qualysv2_dark.svg
similarity index 100%
rename from Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_dark.svg
rename to Packs/qualys/Integrations/Qualysv2/Qualysv2_dark.svg
diff --git a/Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_light.svg b/Packs/qualys/Integrations/Qualysv2/Qualysv2_light.svg
similarity index 100%
rename from Packs/qualys/Integrations/QualysEventCollector/QualysEventCollector_light.svg
rename to Packs/qualys/Integrations/Qualysv2/Qualysv2_light.svg
diff --git a/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py b/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py
index bdd3fb2d835c..2a51fb876c23 100644
--- a/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py
+++ b/Packs/qualys/Integrations/Qualysv2/Qualysv2_test.py
@@ -22,9 +22,156 @@
     parse_raw_response,
     get_simple_response_from_raw,
     validate_required_group,
+    get_activity_logs_events_command,
+    fetch_events,
     get_activity_logs_events,
     fetch_assets,
     ASSETS_FETCH_FROM,
     ASSETS_DATE_FORMAT,
     HOST_LIMIT
 )
-from CommonServerPython import DemistoException
+from CommonServerPython import *  # noqa: F401
+
+
+ACTIVITY_LOGS_NEWEST_EVENT_DATETIME = 'activity_logs_newest_event_datetime'
+ACTIVITY_LOGS_NEXT_PAGE = 'activity_logs_next_page'
+ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN = 'activity_logs_since_datetime_prev_run'
+HOST_DETECTIONS_NEWEST_EVENT_DATETIME = 'host_detections_newest_event_datetime'
+HOST_DETECTIONS_NEXT_PAGE = 'host_detections_next_page'
+HOST_DETECTIONS_SINCE_DATETIME_PREV_RUN = 'host_detections_since_datetime_prev_run'
+HOST_LAST_FETCH = 'host_last_fetch'
+BEGIN_RESPONSE_LOGS_CSV = "----BEGIN_RESPONSE_BODY_CSV"
+END_RESPONSE_LOGS_CSV = "----END_RESPONSE_BODY_CSV"
+FOOTER = """----BEGIN_RESPONSE_FOOTER_CSV
+WARNING
+"CODE","TEXT","URL"
+"1980","17 record limit exceeded. Use URL to get next batch of results.","https://server_url/api/2.0/fo/activity_log/
+?action=list&since_datetime=2022-12-21T03:42:05Z&truncation_limit=10&id_max=123456"
+----END_RESPONSE_FOOTER_CSV"""
+
+
+def test_get_activity_logs_events_command(requests_mock):
+    """
+    Given:
+    - activity_logs_events_command
+
+    When:
+    - Want to list all existing activity logs
+
+    Then:
+    - Ensure the activity logs are returned as human-readable output and the number of results is as expected.
+ """ + base_url = 'https://server_url/' + with open('test_data/activity_logs.csv') as f: + logs = f.read() + requests_mock.get(f'{base_url}api/2.0/fo/activity_log/' + f'?action=list&truncation_limit=0&since_datetime=2023-03-01T00%3A00%3A00Z', text=logs) + client = Client(base_url=base_url, + verify=True, + headers={}, + proxy=False, + username='demisto', + password='demisto', + ) + args = {'limit': 50, 'since_datetime': '1 March 2023'} + first_fetch = '2022-03-21T03:42:05Z' + activity_logs_events, results = get_activity_logs_events_command(client, args, first_fetch) + assert 'Activity Logs' in results.readable_output + assert len(activity_logs_events) == 17 + + +@pytest.mark.parametrize('activity_log_last_run, logs_number, add_footer', + [(None, 17, True), + ("2023-05-24T09:55:35Z", 0, True), + ("2023-05-14T15:04:55Z", 7, True), + ("2023-01-01T08:06:44Z", 17, False)]) +def test_fetch_logs_events_command(requests_mock, activity_log_last_run, logs_number, add_footer): + """ + Given: + - fetch events command (fetches logs) + + When: + - Running fetch-events command + + Then: + - Ensure number of events fetched + - Ensure next page token saved + - Ensure previous run saved + - Ensure newest event time saved + """ + first_fetch_str = '2022-12-21T03:42:05Z' + base_url = 'https://server_url/' + truncation_limit = logs_number + with open('test_data/activity_logs.csv') as f: + logs = f.read() + new_logs = f'{BEGIN_RESPONSE_LOGS_CSV}' + for row in logs.split('\n'): + if activity_log_last_run and activity_log_last_run in row: + new_logs += f'{row}\n' + break + new_logs += f'{row}\n' + new_logs += f'{END_RESPONSE_LOGS_CSV}\n' + if add_footer: + new_logs += f'{FOOTER}\n' + + requests_mock.get(f'{base_url}api/2.0/fo/activity_log/' + f'?action=list&truncation_limit={truncation_limit}&' + f'since_datetime={activity_log_last_run if activity_log_last_run else first_fetch_str}', + text=new_logs) + client = Client(base_url=base_url, + verify=True, + headers={}, + proxy=False, + username='demisto', + password='demisto', + ) + last_run = {ACTIVITY_LOGS_NEWEST_EVENT_DATETIME: activity_log_last_run} + + logs_next_run, activity_logs_events = fetch_events( + client=client, + last_run=last_run, + newest_event_field=ACTIVITY_LOGS_NEWEST_EVENT_DATETIME, + next_page_field=ACTIVITY_LOGS_NEXT_PAGE, + previous_run_time_field=ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN, + fetch_function=get_activity_logs_events, + first_fetch_time=first_fetch_str, + max_fetch=truncation_limit, + ) + assert len(activity_logs_events) == logs_number + assert logs_next_run.get(ACTIVITY_LOGS_NEXT_PAGE) == ("123456" if add_footer else None) + assert logs_next_run.get(ACTIVITY_LOGS_SINCE_DATETIME_PREV_RUN) == activity_log_last_run or first_fetch_str + assert logs_next_run.get(ACTIVITY_LOGS_NEWEST_EVENT_DATETIME) == "2023-05-24T09:55:35Z" + + +def test_fetch_assets_command(requests_mock): + """ + Given: + - fetch_assets_command + When: + - Want to list all existing incidents + Then: + - Ensure List assets and vulnerabilities. 
+ """ + base_url = 'https://server_url/' + with open('./test_data/host_list_detections_raw.xml') as f: + assets = f.read() + with open('./test_data/vulnerabilities_raw.xml') as f: + vulnerabilities = f.read() + requests_mock.get(f'{base_url}api/2.0/fo/asset/host/vm/detection/' + f'?action=list&truncation_limit={HOST_LIMIT}&vm_scan_date_after=' + f'{arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}', text=assets) + + requests_mock.post(f'{base_url}api/2.0/fo/knowledge_base/vuln/' + f'?action=list&last_modified_after={arg_to_datetime(ASSETS_FETCH_FROM).strftime(ASSETS_DATE_FORMAT)}', + text=vulnerabilities) + + client = Client(base_url=base_url, + verify=True, + headers={}, + proxy=False, + username='demisto', + password='demisto', + ) + assets, vulnerabilities = fetch_assets(client=client) + + assert len(assets) == 8 + assert len(vulnerabilities) == 2 class TestIsEmptyResult: diff --git a/Packs/qualys/Integrations/Qualysv2/README.md b/Packs/qualys/Integrations/Qualysv2/README.md index 12831fcde09b..d2051ba9b5e6 100644 --- a/Packs/qualys/Integrations/Qualysv2/README.md +++ b/Packs/qualys/Integrations/Qualysv2/README.md @@ -1,4 +1,4 @@ -Qualys Vulnerability Management lets you create, run, fetch and manage reports, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance. +Qualys VMDR lets you create, run, fetch and manage reports, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance. This integration was integrated and tested with version 2.0 of QualysVulnerabilityManagement ## Changes compared to V1 @@ -53,22 +53,27 @@ This integration was integrated and tested with version 2.0 of QualysVulnerabili 11. New playbook - qualys-report-launch-compliance-policy-and-fetch.yml -## Configure Qualys v2 on Cortex XSOAR +## Configure Qualys VMDR on Cortex XSOAR 1. Navigate to **Settings** > **Integrations** > **Servers & Services**. -2. Search for Qualys v2. +2. Search for Qualys VMDR. 3. Click **Add instance** to create and configure a new integration instance. - | **Parameter** | **Required** | - | --- | --- | - | Server URL | True | - | Username | True | - | Password | True | + | **Parameter** | **Required** | + |------------------------------------| --- | + | Server URL | True | + | Username | True | + | Password | True | | Trust any certificate (not secure) | False | - | Use system proxy settings | False | + | Use system proxy settings | False | + | First fetch time | True | + | Fetch event Limit | True | 4. Click **Test** to validate the URLs, token, and connection. +## Notes: +- ***Fetch assets and vulnerabilities*** command fetches assets and vulnerabilities from the last 90 days only. + ## Asset Tag Commands There are several API endpoints on the Qualys API that can be used in the QualysV2 integration configuration as the `SERVER URL` parameter. When using `asset-tag` commands, the [official documentation](https://www.qualys.com/docs/qualys-asset-management-tagging-api-v2-user-guide.pdf) recommends that the `SERVER URL` parameter should be in the following format: `https://qualysapi..apps.qualys.com/`. @@ -22062,4 +22067,46 @@ There is no context output for this command. #### Human Readable Output ->Successfully purged 1 record \ No newline at end of file +>Successfully purged 1 record + + +### qualys-get-events +*** +Manual command to fetch events from Qualys and display them. 
+
+
+#### Base Command
+
+`qualys-get-events`
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| should_push_events | If true, the command will create events; otherwise, it will only display them. Default is false. | Required |
+| limit | Maximum number of results to return. | Optional |
+| since_datetime | Date to return results from. | Optional |
+| offset | The offset from which to return events. | Optional |
+
+
+#### Context Output
+
+There is no context output for this command.
+
+### qualys-get-assets
+***
+Manual command to fetch assets from Qualys and display them.
+
+
+#### Base Command
+
+`qualys-get-assets`
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| should_push_assets | If true, the command will create assets; otherwise, it will only display the number of available assets. Default is false. | Required |
+
+
+#### Context Output
+
+There is no context output for this command.
\ No newline at end of file
diff --git a/Packs/qualys/Integrations/QualysEventCollector/test_data/activity_logs.csv b/Packs/qualys/Integrations/Qualysv2/test_data/activity_logs.csv
similarity index 100%
rename from Packs/qualys/Integrations/QualysEventCollector/test_data/activity_logs.csv
rename to Packs/qualys/Integrations/Qualysv2/test_data/activity_logs.csv
diff --git a/Packs/qualys/Integrations/QualysEventCollector/test_data/host_list_detections_raw.xml b/Packs/qualys/Integrations/Qualysv2/test_data/host_list_detections_raw.xml
similarity index 100%
rename from Packs/qualys/Integrations/QualysEventCollector/test_data/host_list_detections_raw.xml
rename to Packs/qualys/Integrations/Qualysv2/test_data/host_list_detections_raw.xml
diff --git a/Packs/qualys/Integrations/Qualysv2/test_data/vulnerabilities_raw.xml b/Packs/qualys/Integrations/Qualysv2/test_data/vulnerabilities_raw.xml
new file mode 100644
index 000000000000..7dd200b979de
--- /dev/null
+++ b/Packs/qualys/Integrations/Qualysv2/test_data/vulnerabilities_raw.xml
@@ -0,0 +1,130 @@
+<!-- Knowledge base vulnerability list test fixture (XML markup elided in rendering): response datetime 2024-02-12T15:21:40Z; two VULN records: QID 10052, type Vulnerability, severity 3, title <![CDATA[Vtecrm Vtenext Multiple Security Vulnerabilities]]>, category CGI, last modified 2023-12-19T12:02:30Z, published 2021-01-21T12:51:22Z, Exploit Available; and QID 10186, type Vulnerability, severity 2, title <![CDATA[]]>, category CGI, last modified 2024-02-10T12:03:39Z, published 2000-11-10T11:00:00Z, Patch Available, Exploit Available. -->
diff --git a/Packs/qualys/ReleaseNotes/3_0_0.json b/Packs/qualys/ReleaseNotes/3_0_0.json
new file mode 100644
index 000000000000..82a24efeca3e
--- /dev/null
+++ b/Packs/qualys/ReleaseNotes/3_0_0.json
@@ -0,0 +1 @@
+{"breakingChanges":true,"breakingChangesNotes":"The ***Qualys Event Collector*** was removed from this pack. To fetch events and assets from Qualys, use the ***Qualys VMDR*** integration instead. Renamed the integration from ***Qualys v2*** to ***Qualys VMDR***."}
diff --git a/Packs/qualys/ReleaseNotes/3_0_0.md b/Packs/qualys/ReleaseNotes/3_0_0.md
new file mode 100644
index 000000000000..c148fd2bd57f
--- /dev/null
+++ b/Packs/qualys/ReleaseNotes/3_0_0.md
@@ -0,0 +1,11 @@
+
+#### Integrations
+##### Qualys VMDR
+- **Breaking Change**: The ***Qualys Event Collector*** was removed from this pack. To fetch events and assets from Qualys, use the ***Qualys VMDR*** integration instead.
+- Updated the Docker image to: *demisto/python3:3.10.14.90585*.
+- Renamed the integration from ***Qualys v2*** to ***Qualys VMDR***.
+- Added the following commands:
+  - ***fetch-events***
+  - ***fetch-assets***
+  - ***qualys-get-events***
+  - ***qualys-get-assets***
diff --git a/Packs/qualys/pack_metadata.json b/Packs/qualys/pack_metadata.json
index 3720a9d68b7d..4bf840be3a18 100644
--- a/Packs/qualys/pack_metadata.json
+++ b/Packs/qualys/pack_metadata.json
@@ -2,7 +2,7 @@
     "name": "Qualys",
     "description": "Qualys Vulnerability Management let's you create, run, fetch and manage reports, launch and manage vulnerability and compliance scans, and manage the host assets you want to scan for vulnerabilities and compliance",
     "support": "xsoar",
-    "currentVersion": "2.0.11",
+    "currentVersion": "3.0.0",
     "author": "Cortex XSOAR",
     "url": "https://www.paloaltonetworks.com/cortex",
     "email": "",