Commit 0ac4939

Merge branch 'contrib/PaloAltoNetworks_GCP-EXPANDR-6261' into GCP-EXPANDR-6261
johnnywilkes committed Oct 4, 2023
2 parents 36f2f6c + a5f6322 commit 0ac4939
Showing 59 changed files with 2,072 additions and 458 deletions.
6 changes: 5 additions & 1 deletion .gitlab/ci/.gitlab-ci.on-push.yml
@@ -304,7 +304,11 @@ xpanse-prepare-testing-bucket:
- section_start "Destroy Instances"
- python3 ./Tests/scripts/destroy_instances.py --artifacts-dir $ARTIFACTS_FOLDER --env-file $ARTIFACTS_FOLDER/env_results.json --instance-role "$INSTANCE_ROLE"
- if [ $? -ne 0 ]; then
echo "Failed to destroy instances, exit code"$?";
- python3 ./Tests/scripts/destroy_instances.py --artifacts-dir $ARTIFACTS_FOLDER --env-file $ARTIFACTS_FOLDER/env_results.json --instance-role "$INSTANCE_ROLE"
- destroy_instances_exit_code=$?
- |
if [ ${destroy_instances_exit_code} -ne 0 ]; then
echo "Failed to destroy instances, exit code: ${destroy_instances_exit_code}"
fi
- section_end "Destroy Instances"
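The rewritten step stores $? in destroy_instances_exit_code immediately, because every subsequent command resets $?; in the old version, the $? inside the echo already reflected the [ test rather than the destroy script (and the message had an unbalanced quote). For reference, a minimal Python sketch of the same capture-then-report pattern, with hypothetical argument values:

    import subprocess

    # Capture the exit status as soon as the command finishes, before anything
    # else can overwrite it, then report on failure.
    result = subprocess.run([
        "python3", "./Tests/scripts/destroy_instances.py",
        "--artifacts-dir", "artifacts",              # hypothetical values
        "--env-file", "artifacts/env_results.json",
        "--instance-role", "Server Master",
    ])
    if result.returncode != 0:
        print(f"Failed to destroy instances, exit code: {result.returncode}")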

4 changes: 4 additions & 0 deletions Packs/ApiModules/ReleaseNotes/2_2_20.md
@@ -0,0 +1,4 @@

#### Scripts
##### MicrosoftGraphMailApiModule
Improved implementation by consolidating shared code between **MicrosoftGraphMail** and **MicrosoftGraphListener** into a shared module.
Packs/ApiModules/Scripts/MicrosoftGraphMailApiModule/MicrosoftGraphMailApiModule.py
@@ -35,6 +35,7 @@ def __init__(self, mailbox_to_fetch, folder_to_fetch, first_fetch_interval, emai
display_full_email_body: bool = False,
mark_fetched_read: bool = False,
look_back: int | None = 0,
fetch_html_formatting=True,
**kwargs):
super().__init__(retry_on_rate_limit=True, managed_identities_resource_uri=Resources.graph,
command_prefix="msgraph-mail",
@@ -46,6 +47,7 @@ def __init__(self, mailbox_to_fetch, folder_to_fetch, first_fetch_interval, emai
self._display_full_email_body = display_full_email_body
self._mark_fetched_read = mark_fetched_read
self._look_back = look_back
self.fetch_html_formatting = fetch_html_formatting

@classmethod
def _build_attachments_input(cls, ids, attach_names=None, is_inline=False):
@@ -751,6 +753,54 @@ def send_mail_with_upload_session_flow(self, email: str, json_data: dict,
)
self.send_draft(email=email, draft_id=draft_id) # send the draft email

def _fetch_last_emails(self, folder_id, last_fetch, exclude_ids):
"""
Fetches emails from the given folder that were modified after a specific datetime (last_fetch).
All fields are fetched for each email using the select=* clause;
for more information, see https://docs.microsoft.com/en-us/graph/query-parameters.
An email is excluded from the returned results if its ID is present in exclude_ids.
The number of fetched emails is limited by the _emails_fetch_limit parameter.
Filtering and ordering are based on the modified time.
:type folder_id: ``str``
:param folder_id: Folder id
:type last_fetch: ``str``
:param last_fetch: Previous fetch date
:type exclude_ids: ``list``
:param exclude_ids: List of previous fetch email ids to exclude in current run
:return: Fetched emails and the list of email IDs the next run should exclude
:rtype: ``list`` and ``list``
"""
demisto.debug(f'Fetching emails since {last_fetch}')
fetched_emails = self.get_emails(exclude_ids=exclude_ids, last_fetch=last_fetch,
folder_id=folder_id, overwrite_rate_limit_retry=True,
mark_emails_as_read=self._mark_fetched_read)

fetched_emails_ids = {email.get('id') for email in fetched_emails}
exclude_ids_set = set(exclude_ids)
if not fetched_emails or not (filtered_new_email_ids := fetched_emails_ids - exclude_ids_set):
# no new emails
demisto.debug(f'No new emails: {fetched_emails_ids=}. {exclude_ids_set=}')
return [], exclude_ids

new_emails = [mail for mail in fetched_emails
if mail.get('id') in filtered_new_email_ids][:self._emails_fetch_limit]

last_email_time = new_emails[-1].get('receivedDateTime')
if last_email_time == last_fetch:
# next fetch will need to skip existing exclude_ids
excluded_ids_for_nextrun = exclude_ids + [email.get('id') for email in new_emails]
else:
# next fetch will need to skip messages with the same timestamp as the last email
excluded_ids_for_nextrun = [email.get('id') for email in new_emails if
email.get('receivedDateTime') == last_email_time]

return new_emails, excluded_ids_for_nextrun
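The closing branch decides which IDs the next run must skip: when the newest email's receivedDateTime equals last_fetch, every previously excluded ID plus the new ones stay excluded; otherwise only the IDs sharing the newest timestamp carry over. A standalone sketch of that logic with hypothetical data:

    fetched = [
        {'id': 'a', 'receivedDateTime': '2023-10-04T10:00:00Z'},
        {'id': 'b', 'receivedDateTime': '2023-10-04T10:05:00Z'},
        {'id': 'c', 'receivedDateTime': '2023-10-04T10:05:00Z'},  # ties with 'b'
    ]
    last_fetch, exclude_ids = '2023-10-04T10:00:00Z', ['a']

    new_emails = [m for m in fetched if m['id'] not in exclude_ids]
    last_email_time = new_emails[-1]['receivedDateTime']
    if last_email_time == last_fetch:
        excluded_ids_for_nextrun = exclude_ids + [m['id'] for m in new_emails]
    else:
        excluded_ids_for_nextrun = [m['id'] for m in new_emails
                                    if m['receivedDateTime'] == last_email_time]
    print(excluded_ids_for_nextrun)  # ['b', 'c'] -- only newest-timestamp IDs carry over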

def get_emails_from_api(self, folder_id: str, last_fetch: str, limit: int,
body_as_text: bool = True,
overwrite_rate_limit_retry: bool = False):
@@ -770,6 +820,117 @@ def get_emails_from_api(self, folder_id: str, last_fetch: str, limit: int,
overwrite_rate_limit_retry=overwrite_rate_limit_retry,
).get('value', [])

def get_emails(self, exclude_ids, last_fetch, folder_id, overwrite_rate_limit_retry=False,
mark_emails_as_read: bool = False) -> list:

emails_as_html = self.get_emails_from_api(folder_id,
last_fetch,
body_as_text=False,
limit=len(exclude_ids) + self._emails_fetch_limit, # fetch extra incidents
overwrite_rate_limit_retry=overwrite_rate_limit_retry)

emails_as_text = self.get_emails_from_api(folder_id,
last_fetch,
limit=len(exclude_ids) + self._emails_fetch_limit, # fetch extra incidents
overwrite_rate_limit_retry=overwrite_rate_limit_retry)

if mark_emails_as_read:
for email in emails_as_html:
if email.get('id'):
self.update_email_read_status(
user_id=self._mailbox_to_fetch,
message_id=email["id"],
read=True,
folder_id=folder_id)

return self.get_emails_as_text_and_html(emails_as_html=emails_as_html, emails_as_text=emails_as_text)

@staticmethod
def get_emails_as_text_and_html(emails_as_html, emails_as_text):

text_emails_ids = {email.get('id'): email for email in emails_as_text}
emails_as_html_and_text = []

for email_as_html in emails_as_html:
html_email_id = email_as_html.get('id')
text_email_data = text_emails_ids.get(html_email_id) or {}
if not text_email_data:
demisto.info(f'There is no matching text email for HTML email with ID {html_email_id}')

body_as_text = text_email_data.get('body')
if body_as_html := email_as_html.get('body'):
email_as_html['body'] = [body_as_html, body_as_text]

unique_body_as_text = text_email_data.get('uniqueBody')
if unique_body_as_html := email_as_html.get('uniqueBody'):
email_as_html['uniqueBody'] = [unique_body_as_html, unique_body_as_text]

emails_as_html_and_text.append(email_as_html)

return emails_as_html_and_text
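After this merge, the body (and uniqueBody) of each returned email is a two-element list holding the HTML variant first and the text variant second. A self-contained sketch of the same pairing, with hypothetical inputs:

    # Hypothetical inputs mirroring what the two get_emails_from_api calls return.
    emails_as_html = [{'id': '1', 'body': {'contentType': 'html', 'content': '<b>hi</b>'}}]
    emails_as_text = [{'id': '1', 'body': {'contentType': 'text', 'content': 'hi'}}]

    text_by_id = {e['id']: e for e in emails_as_text}
    for email in emails_as_html:
        text_email = text_by_id.get(email['id']) or {}
        email['body'] = [email['body'], text_email.get('body')]

    assert emails_as_html[0]['body'] == [
        {'contentType': 'html', 'content': '<b>hi</b>'},
        {'contentType': 'text', 'content': 'hi'},
    ]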

@staticmethod
def get_email_content_as_text_and_html(email):
email_body: tuple = email.get('body') or () # email body including replyTo emails.
email_unique_body: tuple = email.get('uniqueBody') or () # email-body without replyTo emails.

# there are situations where the 'body' key won't be returned from the api response, hence taking the uniqueBody
# in those cases for both html/text formats.
try:
email_content_as_html, email_content_as_text = email_body or email_unique_body
except ValueError:
demisto.info(f'email body content is missing from email {email}')
return '', ''

return email_content_as_html.get('content'), email_content_as_text.get('content')
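Since the merge stores each body as an [html, text] pair, the tuple unpacking recovers both variants, and a ValueError (wrong number of values) signals a missing or unmerged body. A minimal illustration with a hypothetical email:

    email = {'body': [{'content': '<b>hi</b>'}, {'content': 'hi'}], 'uniqueBody': None}
    html_part, text_part = email.get('body') or email.get('uniqueBody') or ()
    print(html_part.get('content'), text_part.get('content'))  # <b>hi</b> hi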

def _parse_email_as_incident(self, email, overwrite_rate_limit_retry=False):
"""
Parses fetched emails as incidents.
:type email: ``dict``
:param email: Fetched email to parse
:return: Parsed email
:rtype: ``dict``
"""
# there are situations where the 'body' key won't be returned from the api response, hence taking the uniqueBody
# in those cases for both html/text formats.

def body_extractor(email, parsed_email):
email_content_as_html, email_content_as_text = self.get_email_content_as_text_and_html(email)
parsed_email['Body'] = email_content_as_html if self.fetch_html_formatting else email_content_as_text
parsed_email['Text'] = email_content_as_text
parsed_email['BodyType'] = 'html' if self.fetch_html_formatting else 'text'

parsed_email = GraphMailUtils.parse_item_as_dict(email, body_extractor)

# handling attachments of fetched email
attachments = self._get_email_attachments(
message_id=email.get('id', ''),
overwrite_rate_limit_retry=overwrite_rate_limit_retry
)
if attachments:
parsed_email['Attachments'] = attachments

parsed_email['Mailbox'] = self._mailbox_to_fetch

body = email.get('bodyPreview', '')
if not body or self._display_full_email_body:
_, body = self.get_email_content_as_text_and_html(email)

incident = {
'name': parsed_email.get('Subject'),
'details': body,
'labels': GraphMailUtils.parse_email_as_labels(parsed_email),
'occurred': parsed_email.get('ReceivedTime'),
'attachment': parsed_email.get('Attachments', []),
'rawJSON': json.dumps(parsed_email),
'ID': parsed_email.get('ID') # only used for look-back to identify the email in a unique way
}

return incident
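The new fetch_html_formatting flag (threaded through __init__ above) only affects which variant lands in Body and the BodyType marker; Text always carries the plain-text variant. A standalone sketch of body_extractor's effect, with stand-in content values:

    def body_extractor(parsed_email, fetch_html_formatting):
        # Stand-ins for get_email_content_as_text_and_html (hypothetical values).
        email_content_as_html, email_content_as_text = '<b>hi</b>', 'hi'
        parsed_email['Body'] = email_content_as_html if fetch_html_formatting else email_content_as_text
        parsed_email['Text'] = email_content_as_text
        parsed_email['BodyType'] = 'html' if fetch_html_formatting else 'text'

    parsed: dict = {}
    body_extractor(parsed, fetch_html_formatting=True)
    assert parsed == {'Body': '<b>hi</b>', 'Text': 'hi', 'BodyType': 'html'}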


# HELPER FUNCTIONS
class GraphMailUtils:
Packs/ApiModules/Scripts/MicrosoftGraphMailApiModule/MicrosoftGraphMailApiModule.yml
@@ -8,7 +8,7 @@ subtype: python3
tags:
- infra
- server
comment: Common Microsoft Graph Mail code that will be appended into the Microsoft Graph Mail integrations when it's deployed
comment: Common Microsoft Graph Mail code that will be appended into the Microsoft Graph Mail integrations when it's deployed.
system: true
scripttarget: 0
dependson: {}
2 changes: 1 addition & 1 deletion Packs/ApiModules/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "ApiModules",
"description": "API Modules",
"support": "xsoar",
"currentVersion": "2.2.19",
"currentVersion": "2.2.20",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
60 changes: 41 additions & 19 deletions Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector.py
@@ -65,7 +65,7 @@ def fetch_by_aql_query(self, aql_query: str, max_fetch: int, after: None | datet
list[dict]: List of events objects represented as dictionaries.
"""
params: dict[str, Any] = {'aql': aql_query, 'includeTotal': 'true', 'length': max_fetch, 'orderBy': 'time'}
if not after: # if this is the first fetch run of the instance, use current datetime as starting point
if not after: # this should only happen when get-events command is used without from_date argument
after = datetime.now()
params['aql'] += f' after:{after.strftime(DATE_FORMAT)}' # add 'after' date filter to AQL query in the desired format
raw_response = self._http_request(url_suffix='/search/', method='GET', params=params, headers=self._headers)
Expand Down Expand Up @@ -380,9 +380,10 @@ def handle_fetched_events(events: list[dict[str, Any]], next_run: dict[str, str
)
demisto.setLastRun(next_run)
demisto.debug(f'debug-log: {len(events)} events were sent to XSIAM.')
demisto.debug(f'debug-log: {next_run=}')
else:
demisto.debug('debug-log: No new events fetched, Last run was not updated.')
demisto.debug('debug-log: No new events fetched.')

demisto.debug(f'debug-log: {next_run=}')


def events_to_command_results(events: list[dict[str, Any]]) -> CommandResults:
Expand All @@ -401,6 +402,21 @@ def events_to_command_results(events: list[dict[str, Any]]) -> CommandResults:
removeNull=True))


def set_last_run_with_current_time(last_run: dict, event_types_to_fetch) -> None:
""" Set last fetch time values for all selected event types to current time.
This pins the fetch start time for each event type until events are actually fetched for that type.
Args:
last_run (dict): Last run dictionary.
event_types_to_fetch (list): List of event types to fetch.
"""
now: datetime = datetime.now()
now_str: str = now.strftime(DATE_FORMAT)
for event_type in event_types_to_fetch:
last_fetch_time = f'{EVENT_TYPES[event_type].type}_last_fetch_time'
last_run[last_fetch_time] = now_str


''' MAIN FUNCTION '''


@@ -436,27 +452,33 @@ def main(): # pragma: no cover
elif command in ('fetch-events', 'armis-get-events'):
should_return_results = False

if command == 'armis-get-events':
last_run = {}
should_return_results = True
if not last_run: # initial fetch - update last fetch time values to current time
set_last_run_with_current_time(last_run, event_types_to_fetch)
demisto.setLastRun(last_run)
demisto.debug('debug-log: Initial fetch - updating last fetch time value to current time for each event type.')

else:
if command == 'armis-get-events':
last_run = {}
should_return_results = True

should_push_events = (command == 'fetch-events' or should_push_events)
should_push_events = (command == 'fetch-events' or should_push_events)

events, next_run = fetch_events(
client=client,
max_fetch=max_fetch,
last_run=last_run,
fetch_start_time=fetch_start_time,
event_types_to_fetch=event_types_to_fetch,
)
events, next_run = fetch_events(
client=client,
max_fetch=max_fetch,
last_run=last_run,
fetch_start_time=fetch_start_time,
event_types_to_fetch=event_types_to_fetch,
)

demisto.debug(f'debug-log: {len(events)} events fetched from armis api')
demisto.debug(f'debug-log: {len(events)} events fetched from armis api')

if should_push_events:
handle_fetched_events(events, next_run)
if should_push_events:
handle_fetched_events(events, next_run)

if should_return_results:
return_results(events_to_command_results(events))
if should_return_results:
return_results(events_to_command_results(events))

else:
raise NotImplementedError(f'Command {command} is not implemented')
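Condensed, the new control flow in main() for fetch-events / armis-get-events reads as follows (a sketch assembled from the added lines above, not a drop-in replacement): the very first fetch-events run only stamps per-type start times and fetches nothing; real fetching begins on the following run.

    if not last_run:  # initial run: record a start time per event type, fetch nothing
        set_last_run_with_current_time(last_run, event_types_to_fetch)
        demisto.setLastRun(last_run)
    else:
        if command == 'armis-get-events':
            last_run = {}
            should_return_results = True
        should_push_events = (command == 'fetch-events' or should_push_events)
        events, next_run = fetch_events(client=client, max_fetch=max_fetch,
                                        last_run=last_run, fetch_start_time=fetch_start_time,
                                        event_types_to_fetch=event_types_to_fetch)
        if should_push_events:
            handle_fetched_events(events, next_run)
        if should_return_results:
            return_results(events_to_command_results(events))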
Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector_test.py
@@ -1,4 +1,4 @@
from ArmisEventCollector import Client, datetime, DemistoException, arg_to_datetime, EVENT_TYPE, EVENT_TYPES
from ArmisEventCollector import Client, datetime, DemistoException, arg_to_datetime, EVENT_TYPE, EVENT_TYPES, Any
import pytest
from freezegun import freeze_time

Expand Down Expand Up @@ -312,6 +312,26 @@ def test_events_to_command_results(self):
readable_output=tableToMarkdown(name=f'{VENDOR} {PRODUCT} events', t=response_with_two_events, removeNull=True))
assert events_to_command_results(response_with_two_events).readable_output == expected_result.readable_output

@freeze_time("2023-01-01 01:00:00")
def test_set_last_run_with_current_time_initial(self, mocker):
"""
Given:
- A valid list of fetched events.
- An empty last_run dictionary.
When:
- Initial fetch is running.
Then:
- Set the last_run dictionary with the current time for each event type key.
"""
from ArmisEventCollector import set_last_run_with_current_time

last_run: dict[Any, Any] = {}
event_types: list[str] = ['Alerts', 'Threat activities']

set_last_run_with_current_time(last_run, event_types)

assert last_run['alerts_last_fetch_time'] == last_run['threat_activities_last_fetch_time'] == '2023-01-01T01:00:00'


class TestFetchFlow:

@@ -379,15 +399,17 @@ class TestFetchFlow:
}]

case_first_fetch = ( # type: ignore
# this case tests the actual first fetch that runs after the initial fetch (which only sets the last run)
1000,
{},
{'alerts_last_fetch_time': '2023-01-01T01:00:00'},
fetch_start_time,
['Events'],
events_with_different_time_1,
events_with_different_time_1,
{'events_last_fetch_ids': ['3'],
'events_last_fetch_time': '2023-01-01T01:00:30.123456+00:00', 'access_token': 'test_access_token'}
)

case_second_fetch = ( # type: ignore
1000,
{'events_last_fetch_ids': ['1', '2', '3'],
6 changes: 6 additions & 0 deletions Packs/Armis/ReleaseNotes/1_1_3.md
@@ -0,0 +1,6 @@

#### Integrations

##### Armis Event Collector

- Updated the initial fetch handling to ensure that the fetch start time remains constant until new events are fetched for each event type.
2 changes: 1 addition & 1 deletion Packs/Armis/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Armis",
"description": "Agentless and passive security platform that sees, identifies, and classifies every device, tracks behavior, identifies threats, and takes action automatically to protect critical information and systems",
"support": "partner",
"currentVersion": "1.1.2",
"currentVersion": "1.1.3",
"author": "Armis Corporation",
"url": "https://support.armis.com/",
"email": "support@armis.com",
1 change: 1 addition & 0 deletions Packs/BrocadeSwitch/.secrets-ignore
@@ -0,0 +1 @@
10.1.2.3