Skip to content

Commit

Permalink
Cs falcon detections revert (#29833)
Browse files Browse the repository at this point in the history
* Revert "Cs falcon fetch limit issue (#29411)"

This reverts commit f7b7d5c

* Revert "Cs limit in idp detections (#29550)"

This reverts commit 47738d5

* Added rn

* Added rn
  • Loading branch information
ShahafBenYakir committed Sep 22, 2023
1 parent fffe08c commit be6afbf
Show file tree
Hide file tree
Showing 7 changed files with 32 additions and 79 deletions.
Expand Up @@ -529,7 +529,7 @@ def detection_to_incident(detection):

incident = {
'name': 'Detection ID: ' + str(detection.get('detection_id')),
'occurred': str(detection.get('first_behavior')),
'occurred': str(detection.get('created_timestamp')),
'rawJSON': json.dumps(detection),
'severity': severity_string_to_int(detection.get('max_severity_displayname'))
}
Expand Down Expand Up @@ -1306,7 +1306,7 @@ def get_fetch_detections(last_created_timestamp=None, filter_arg=None, offset: i
if filter_arg:
params['filter'] = filter_arg
elif last_created_timestamp:
params['filter'] = f"first_behavior:>'{last_created_timestamp}'"
params['filter'] = f"created_timestamp:>'{last_created_timestamp}'"
elif last_updated_timestamp:
params['filter'] = f"date_updated:>'{last_updated_timestamp}'"

Expand Down Expand Up @@ -1369,7 +1369,7 @@ def get_idp_detections_ids(filter_arg=None, offset: int = 0, limit=INCIDENTS_PER
:rtype ``dict``
"""
params = {
'sort': 'start_time.asc',
'sort': 'created_timestamp.asc',
'offset': offset,
'filter': filter_arg
}
Expand Down Expand Up @@ -2500,21 +2500,6 @@ def migrate_last_run(last_run: dict[str, str] | list[dict]) -> list[dict]:
return [updated_last_run_detections, updated_last_run_incidents, {}]


def sort_incidents_summaries_by_ids_order(ids_order, full_incidents, id_field):
    """ Sort a list of incident summaries to match the order of a list of ids.

    The entities/summaries endpoints do not guarantee that results come back
    in the same order as the requested ids, so the summaries are re-aligned
    here to follow ids_order.

    Args:
        ids_order: list of incident ids in the desired order.
        full_incidents: list of incident dicts fetched for those ids.
        id_field: name of the id field inside each incident dict.
    Returns:
        list[dict]: The incidents from full_incidents, reordered to follow ids_order.
    Raises:
        KeyError: if an id in ids_order has no matching incident in full_incidents.
    """
    # Index once by id, then emit in the requested order (O(n) instead of O(n^2)).
    incidents_by_id = {incident[id_field]: incident for incident in full_incidents}
    return [incidents_by_id[incident_id] for incident_id in ids_order]


def fetch_incidents():
incidents: list = []
detections: list = []
Expand Down Expand Up @@ -2542,7 +2527,7 @@ def fetch_incidents():
incident_type = 'detection'
fetch_query = demisto.params().get('fetch_query')
if fetch_query:
fetch_query = f"first_behavior:>'{start_fetch_time}'+{fetch_query}"
fetch_query = f"created_timestamp:>'{start_fetch_time}'+{fetch_query}"
detections_ids = demisto.get(get_fetch_detections(filter_arg=fetch_query, limit=fetch_limit), 'resources')
else:
detections_ids = demisto.get(get_fetch_detections(last_created_timestamp=start_fetch_time, limit=fetch_limit),
Expand All @@ -2551,22 +2536,18 @@ def fetch_incidents():
raw_res = get_detections_entities(detections_ids)

if raw_res is not None and "resources" in raw_res:
full_detections = demisto.get(raw_res, "resources")
sorted_detections = sort_incidents_summaries_by_ids_order(ids_order=detections_ids,
full_incidents=full_detections,
id_field='detection_id')
for detection in sorted_detections:
for detection in demisto.get(raw_res, "resources"):
detection['incident_type'] = incident_type
demisto.debug(
f"CrowdStrikeFalconMsg: Detection {detection['detection_id']} "
f"was fetched which was created in {detection['first_behavior']}")
f"was fetched which was created in {detection['created_timestamp']}")
incident = detection_to_incident(detection)

detections.append(incident)

detections = filter_incidents_by_duplicates_and_limit(incidents_res=detections,
last_run=current_fetch_info_detections,
fetch_limit=INCIDENTS_PER_FETCH, id_field='name')
fetch_limit=fetch_limit, id_field='name')

for detection in detections:
occurred = dateparser.parse(detection["occurred"])
Expand Down Expand Up @@ -2602,17 +2583,13 @@ def fetch_incidents():
if incidents_ids:
raw_res = get_incidents_entities(incidents_ids)
if raw_res is not None and "resources" in raw_res:
full_incidents = demisto.get(raw_res, "resources")
sorted_incidents = sort_incidents_summaries_by_ids_order(ids_order=incidents_ids,
full_incidents=full_incidents,
id_field='incident_id')
for incident in sorted_incidents:
for incident in demisto.get(raw_res, "resources"):
incident['incident_type'] = incident_type
incident_to_context = incident_to_incident_context(incident)
incidents.append(incident_to_context)

incidents = filter_incidents_by_duplicates_and_limit(incidents_res=incidents, last_run=current_fetch_info_incidents,
fetch_limit=INCIDENTS_PER_FETCH, id_field='name')
fetch_limit=fetch_limit, id_field='name')
for incident in incidents:
occurred = dateparser.parse(incident["occurred"])
if occurred:
Expand All @@ -2632,26 +2609,22 @@ def fetch_incidents():
date_format=IDP_DATE_FORMAT)
fetch_limit = current_fetch_info_idp_detections.get('limit') or INCIDENTS_PER_FETCH
fetch_query = demisto.params().get('idp_detections_fetch_query', "")
filter = f"product:'idp'+start_time:>'{start_fetch_time}'"
filter = f"product:'idp'+created_timestamp:>'{start_fetch_time}'"

if fetch_query:
filter += f"+{fetch_query}"
idp_detections_ids = demisto.get(get_idp_detections_ids(filter_arg=filter, limit=fetch_limit), 'resources')
if idp_detections_ids:
raw_res = get_idp_detection_entities(idp_detections_ids)
if "resources" in raw_res:
full_detections = demisto.get(raw_res, "resources")
sorted_detections = sort_incidents_summaries_by_ids_order(ids_order=idp_detections_ids,
full_incidents=full_detections,
id_field='composite_id')
for idp_detection in sorted_detections:
for idp_detection in demisto.get(raw_res, "resources"):
idp_detection['incident_type'] = IDP_DETECTION
idp_detection_to_context = idp_detection_to_incident_context(idp_detection)
idp_detections.append(idp_detection_to_context)

idp_detections = filter_incidents_by_duplicates_and_limit(incidents_res=idp_detections,
last_run=current_fetch_info_idp_detections,
fetch_limit=INCIDENTS_PER_FETCH, id_field='name')
fetch_limit=fetch_limit, id_field='name')
updated_last_run = update_last_run_object(last_run=current_fetch_info_idp_detections, incidents=idp_detections,
fetch_limit=fetch_limit,
start_fetch_time=start_fetch_time, end_fetch_time=end_fetch_time,
Expand Down
Expand Up @@ -4375,7 +4375,7 @@ script:
- contextPath: CrowdStrike.IDPEntity.EmailAddresses
description: The identity entity email address.
type: String
dockerimage: demisto/py3-tools:1.0.0.73055
dockerimage: demisto/py3-tools:1.0.0.74702
isfetch: true
ismappable: true
isremotesyncin: true
Expand Down
Expand Up @@ -2184,13 +2184,10 @@ def set_up_mocks(self, requests_mock, mocker):
requests_mock.post(f'{SERVER_URL}/detects/entities/summaries/GET/v1',
json={'resources': [{'detection_id': 'ldt:1',
'created_timestamp': '2020-09-04T09:16:11Z',
'max_severity_displayname': 'Low',
'first_behavior': '2020-09-04T09:16:11Z'
},
'max_severity_displayname': 'Low'},
{'detection_id': 'ldt:2',
'created_timestamp': '2020-09-04T09:20:11Z',
'max_severity_displayname': 'Low',
'first_behavior': '2020-09-04T09:16:11Z'}]})
'max_severity_displayname': 'Low'}]})
requests_mock.get(f'{SERVER_URL}/incidents/queries/incidents/v1', json={})
requests_mock.post(f'{SERVER_URL}/incidents/entities/incidents/GET/v1', json={})

Expand Down Expand Up @@ -2258,16 +2255,11 @@ def test_new_fetch(self, set_up_mocks, mocker, requests_mock):
requests_mock.post(f'{SERVER_URL}/detects/entities/summaries/GET/v1',
json={'resources': [{'detection_id': 'ldt:1',
'created_timestamp': '2020-09-04T09:16:11Z',
'max_severity_displayname': 'Low', 'first_behavior': '2020-09-04T09:16:11Z'},
{'detection_id': 'ldt:2',
'created_timestamp': '2020-09-04T09:16:11Z',
'max_severity_displayname': 'Low', 'first_behavior': '2020-09-04T09:16:11Z'}
]})
'max_severity_displayname': 'Low'}]})
from CrowdStrikeFalcon import fetch_incidents
fetch_incidents()
assert demisto.setLastRun.mock_calls[0][1][0][0] == {
'time': '2020-09-04T09:16:11Z', 'limit': 2, "found_incident_ids": {'Detection ID: ldt:1': 1599210970,
'Detection ID: ldt:2': 1599210970}}
'time': '2020-09-04T09:16:11Z', 'limit': 2, "found_incident_ids": {'Detection ID: ldt:1': 1599210970}}

def test_fetch_incident_type(self, set_up_mocks, mocker):
"""
Expand Down Expand Up @@ -2402,14 +2394,12 @@ def test_new_fetch(self, set_up_mocks, mocker, requests_mock):
'offset': 2}, {}])
# Override post to have 1 results so FETCH_LIMIT won't be reached
requests_mock.post(f'{SERVER_URL}/incidents/entities/incidents/GET/v1',
json={'resources': [{'incident_id': 'ldt:1', 'start': '2020-09-04T09:16:11Z'},
{'incident_id': 'ldt:2', 'start': '2020-09-04T09:16:11Z'}]})
json={'resources': [{'incident_id': 'ldt:1', 'start': '2020-09-04T09:16:11Z'}]})
from CrowdStrikeFalcon import fetch_incidents
fetch_incidents()
assert demisto.setLastRun.mock_calls[0][1][0][1] == {'time': '2020-09-04T09:16:11Z',
'limit': 2,
'found_incident_ids': {'Incident ID: ldt:1': 1598462533,
'Incident ID: ldt:2': 1598462533}}
'found_incident_ids': {'Incident ID: ldt:1': 1598462533}}

def test_incident_type_in_fetch(self, set_up_mocks, mocker):
"""Tests the addition of incident_type field to the context
Expand Down Expand Up @@ -5512,24 +5502,3 @@ def test_list_detection_summaries_command_no_results(mocker):
mocker.patch('CrowdStrikeFalcon.http_request', return_value=response)
res = list_detection_summaries_command()
assert res.readable_output == '### CrowdStrike Detections\n**No entries.**\n'


def test_sort_incidents_summaries_by_ids_order():
    """
    Test that incident summaries are re-ordered to match the given ids list.
    Given:
        - An unordered list of full incident dicts and the desired id order
    When:
        - Sorting the summaries as fetch_incidents() does for detections
    Then:
        - The incidents are returned following the requested id order
    """
    from CrowdStrikeFalcon import sort_incidents_summaries_by_ids_order
    unordered_incidents = [{"id": "2", "name": "test2"},
                           {"id": "3", "name": "test3"},
                           {"id": "1", "name": "test1"}]
    expected = [{"id": str(num), "name": f"test{num}"} for num in (1, 2, 3)]
    result = sort_incidents_summaries_by_ids_order(ids_order=["1", "2", "3"],
                                                   full_incidents=unordered_incidents,
                                                   id_field="id")
    assert result == expected
8 changes: 8 additions & 0 deletions Packs/CrowdStrikeFalcon/ReleaseNotes/1_11_10.md
@@ -0,0 +1,8 @@

#### Integrations

##### CrowdStrike Falcon
- Updated the Docker image to: *demisto/py3-tools:1.0.0.74702*.

- Reverted the **fetch incidents** changes made in versions 1.11.8 and 1.11.9, as they introduced issues.

1 change: 1 addition & 0 deletions Packs/CrowdStrikeFalcon/ReleaseNotes/1_11_8.md
Expand Up @@ -6,3 +6,4 @@
- Fixed an issue where the **Max incidents per fetch** parameter did not work properly.
- Updated the Docker image to: *demisto/py3-tools:1.0.0.73055*.

WARNING: This version of the pack contains a possible bug in fetching detections; we highly recommend installing version 1.11.10 or higher.
4 changes: 3 additions & 1 deletion Packs/CrowdStrikeFalcon/ReleaseNotes/1_11_9.md
Expand Up @@ -3,4 +3,6 @@

##### CrowdStrike Falcon

- Fixed an issue where there was inconsistency in the last_run object for **IDP Detection** incident type.
- Fixed an issue where there was inconsistency in the last_run object for **IDP Detection** incident type.

WARNING: This version of the pack contains a possible bug in fetching detections; we highly recommend installing version 1.11.10 or higher.
2 changes: 1 addition & 1 deletion Packs/CrowdStrikeFalcon/pack_metadata.json
Expand Up @@ -2,7 +2,7 @@
"name": "CrowdStrike Falcon",
"description": "The CrowdStrike Falcon OAuth 2 API (formerly the Falcon Firehose API), enables fetching and resolving detections, searching devices, getting behaviors by ID, containing hosts, and lifting host containment.",
"support": "xsoar",
"currentVersion": "1.11.9",
"currentVersion": "1.11.10",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
Expand Down

0 comments on commit be6afbf

Please sign in to comment.