add fetch log part2 #27196

Merged
merged 16 commits on Jun 11, 2023
15 changes: 10 additions & 5 deletions Packs/CortexDataLake/Integrations/CortexDataLake/CortexDataLake.py
@@ -866,7 +866,7 @@ def prepare_fetch_incidents_query(fetch_timestamp: str,
"""
if fetch_filter and (fetch_subtype or fetch_severity):
raise DemistoException('Fetch Filter parameter cannot be used with Subtype/Severity parameters.')
-query = f'SELECT {fetch_fields} FROM `{fetch_table}` ' # guardrails-disable-line
+query = f'SELECT {fetch_fields} FROM `{fetch_table}` ' # guardrails-disable-line # noqa: S608
time_filter = 'event_time' if 'log' in fetch_table else 'time_generated'
query += f'WHERE {time_filter} Between TIMESTAMP("{fetch_timestamp}") ' \
f'AND CURRENT_TIMESTAMP'
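For context on the new inline suppressions: S608 is the ruff/Bandit check for string-built SQL, and it fires on any f-string query assembly regardless of where the interpolated values come from. Here the interpolated pieces are integration parameters rather than end-user input, so the finding is suppressed inline; the same suppression recurs in the hunks below. A minimal, hypothetical reproduction (names are illustrative):

```python
# Hypothetical reproduction of the S608 finding (names are illustrative).
# The rule flags the f-string SQL assembly itself, not the values' provenance.
fetch_fields = "*"                # integration parameter, not end-user input
fetch_table = "firewall.traffic"  # integration parameter

query = f"SELECT {fetch_fields} FROM `{fetch_table}` LIMIT 1"  # noqa: S608
print(query)  # SELECT * FROM `firewall.traffic` LIMIT 1
```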
@@ -907,7 +907,7 @@ def test_module(client: Client, fetch_table, fetch_fields, is_fetch, fetch_query
# fetch params not to be tested (won't be used)
fetch_fields = '*'
fetch_table = 'firewall.traffic'
-query = f'SELECT {fetch_fields} FROM `{fetch_table}` limit 1'
+query = f'SELECT {fetch_fields} FROM `{fetch_table}` limit 1' # noqa: S608
client.query_loggings(query)
return_outputs('ok')

@@ -1013,7 +1013,7 @@ def search_by_file_hash_command(args: dict, client: Client) -> Tuple[str, Dict[s
file_hash = args.get('SHA256')

query_start_time, query_end_time = query_timestamp(args)
-query = f'SELECT * FROM `firewall.threat` WHERE file_sha_256 = "{file_hash}" ' # guardrails-disable-line
+query = f'SELECT * FROM `firewall.threat` WHERE file_sha_256 = "{file_hash}" ' # guardrails-disable-line # noqa: S608
query += f'AND time_generated BETWEEN TIMESTAMP("{query_start_time}") AND ' \
f'TIMESTAMP("{query_end_time}") LIMIT {logs_amount}'

@@ -1105,7 +1105,7 @@ def build_query(args, table_name):
f'TIMESTAMP("{query_end_time}") '
limit = args.get('limit', '5')
where += f' AND {timestamp_limitation}' if where else timestamp_limitation
-query = f'SELECT {fields} FROM `firewall.{table_name}` WHERE {where} LIMIT {limit}'
+query = f'SELECT {fields} FROM `firewall.{table_name}` WHERE {where} LIMIT {limit}' # noqa: S608
return fields, query
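To make the assembled string concrete, here is a worked example with assumed inputs (the argument values are illustrative, not taken from the integration):

```python
# Assumed inputs; the final line mirrors the f-string in build_query above.
fields = "source_ip.value, dest_ip.value"
table_name = "traffic"
where = ('source_ip.value = "10.0.0.1" AND time_generated BETWEEN '
         'TIMESTAMP("2023-06-01 00:00:00") AND TIMESTAMP("2023-06-02 00:00:00")')
limit = "5"

query = f'SELECT {fields} FROM `firewall.{table_name}` WHERE {where} LIMIT {limit}'  # noqa: S608
print(query)
# SELECT source_ip.value, dest_ip.value FROM `firewall.traffic`
#   WHERE source_ip.value = "10.0.0.1" AND time_generated BETWEEN ... LIMIT 5
```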


@@ -1119,6 +1119,8 @@ def fetch_incidents(client: Client,
last_run: dict,
fetch_filter: str = '') -> Tuple[Dict[str, str], list]:
last_fetched_event_timestamp = last_run.get('lastRun')
demisto.debug("CortexDataLake - Start fetching")
demisto.debug(f"CortexDataLake - Last run: {json.dumps(last_run)}")

if last_fetched_event_timestamp:
last_fetched_event_timestamp = parser.parse(last_fetched_event_timestamp)
@@ -1127,7 +1129,7 @@
last_fetched_event_timestamp = last_fetched_event_timestamp.replace(microsecond=0)
query = prepare_fetch_incidents_query(last_fetched_event_timestamp, fetch_severity, fetch_table,
fetch_subtype, fetch_fields, fetch_limit, fetch_filter)
-demisto.debug('Query being fetched: {}'.format(query))
+demisto.debug(f"CortexDataLake - Query sent to the server: {query}")
records, _ = client.query_loggings(query)
if not records:
return {'lastRun': str(last_fetched_event_timestamp)}, []
@@ -1137,6 +1139,9 @@
max_fetched_event_timestamp = max(records, key=lambda record: record.get(time_filter, 0)).get(time_filter, 0)

next_run = {'lastRun': epoch_to_timestamp_and_add_milli(max_fetched_event_timestamp)}
+demisto.debug(f'CortexDataLake - Next run after incidents fetching: {json.dumps(next_run)}')
+demisto.debug(f"CortexDataLake - Number of incidents before filtering: {len(records)}")
+demisto.debug(f"CortexDataLake - Number of incidents after filtering: {len(incidents)}")
return next_run, incidents
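The debug lines above follow a single convention that this PR applies to every fetch flow it touches: log the start, the incoming last run, the outgoing query, the computed next run, and the record counts before and after filtering. A minimal, runnable sketch of the pattern (the helper names and the dedup-by-ids logic are illustrative stand-ins, not this integration's code):

```python
import json

def debug(msg: str) -> None:
    print(msg)  # stand-in for demisto.debug

PREFIX = "CortexDataLake"  # each integration logs under its own prefix

def fetch_with_debug_logs(records: list, last_run: dict) -> tuple:
    debug(f"{PREFIX} - Start fetching")
    debug(f"{PREFIX} - Last run: {json.dumps(last_run)}")
    seen = set(last_run.get("ids", []))
    incidents = [r for r in records if r["id"] not in seen]  # illustrative dedup
    next_run = {"ids": sorted(seen | {r["id"] for r in records})}
    debug(f"{PREFIX} - Next run after incidents fetching: {json.dumps(next_run)}")
    debug(f"{PREFIX} - Number of incidents before filtering: {len(records)}")
    debug(f"{PREFIX} - Number of incidents after filtering: {len(incidents)}")
    return next_run, incidents

fetch_with_debug_logs([{"id": "1"}, {"id": "2"}], {"ids": ["1"]})
```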


@@ -2430,7 +2430,7 @@ script:
description: Use this command in case your authentication calls fail due to internal call-limit, the command will reset the limit cache.
execution: false
name: cdl-reset-authentication-timeout
-dockerimage: demisto/python_pancloud_v2:1.0.0.60697
+dockerimage: demisto/python_pancloud_v2:1.0.0.62401
feed: false
isfetch: true
longRunning: false
7 changes: 7 additions & 0 deletions Packs/CortexDataLake/ReleaseNotes/1_4_6.md
@@ -0,0 +1,7 @@

#### Integrations

##### Cortex Data Lake XSOAR Connector

- Added support for fetch logs.
- Updated the Docker image to: demisto/python_pancloud_v2:1.0.0.62401.
2 changes: 1 addition & 1 deletion Packs/CortexDataLake/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Cortex Data Lake by Palo Alto Networks",
"description": "Palo Alto Networks Cortex Data Lake XSOAR Connector provides cloud-based, centralized log storage and aggregation for your on-premise, virtual (private cloud and public cloud) firewalls, for Prisma Access, and for cloud-delivered services such as Cortex XDR",
"support": "xsoar",
"currentVersion": "1.4.5",
"currentVersion": "1.4.6",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
9 changes: 8 additions & 1 deletion Packs/GenericSQL/Integrations/GenericSQL/GenericSQL.py
@@ -16,7 +16,7 @@
try:
# if integration is using an older image (4.5 Server) we don't have expiringdict
from expiringdict import ExpiringDict # pylint: disable=E0401
-except Exception:
+except Exception: # noqa: S110
pass
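S110 is the try-except-pass check (silently swallowed exception). The swallow here is deliberate: on older 4.5 server images the optional expiringdict package is absent and the integration simply runs without it. A hedged, runnable sketch of the same guarded-import pattern (the None fallback is one common variant, not this file's exact code):

```python
# Guarded optional import: tolerate the dependency being absent entirely.
# The except-pass is deliberate, hence the inline suppression.
ExpiringDict = None
try:
    from expiringdict import ExpiringDict  # missing on older server images
except Exception:  # noqa: S110
    pass

print("cache available:", ExpiringDict is not None)
```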


@@ -462,8 +462,11 @@ def fetch_incidents(client: Client, params: dict):
last_run = demisto.getLastRun()
last_run = last_run if last_run else \
initialize_last_run(params.get('fetch_parameters', ''), params.get('first_fetch', ''))
demisto.debug("GenericSQL - Start fetching")
demisto.debug(f"GenericSQL - Last run: {json.dumps(last_run)}")
sql_query = create_sql_query(last_run, params.get('query', ''), params.get('column_name', ''),
params.get('max_fetch', FETCH_DEFAULT_LIMIT))
demisto.debug(f"GenericSQL - Query sent to the server: {sql_query}")
limit_fetch = len(last_run.get('ids', [])) + int(params.get('max_fetch', FETCH_DEFAULT_LIMIT))
bind_variables = generate_bind_variables_for_fetch(params.get('column_name', ''),
params.get('max_fetch', FETCH_DEFAULT_LIMIT), last_run)
@@ -478,6 +481,10 @@ def fetch_incidents(client: Client, params: dict):
if table:
last_run = update_last_run_after_fetch(table, last_run, params.get('fetch_parameters', ''),
params.get('column_name', ''), params.get('id_column', ''))
+demisto.debug(f'GenericSQL - Next run after incidents fetching: {json.dumps(last_run)}')
+demisto.debug(f"GenericSQL - Number of incidents before filtering: {len(result)}")
+demisto.debug(f"GenericSQL - Number of incidents after filtering: {len(incidents)}")
+demisto.debug(f"GenericSQL - Number of incidents skipped: {(len(result) - len(incidents))}")

demisto.info(f'last record now is: {last_run}, '
f'number of incidents fetched is {len(incidents)}')
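One detail worth calling out in this flow: the query limit is widened by the number of IDs already recorded in last_run, so rows that deduplication will discard cannot crowd out genuinely new rows. A sketch of the assumed rationale (the FETCH_DEFAULT_LIMIT value is illustrative):

```python
FETCH_DEFAULT_LIMIT = 50  # illustrative default

def compute_limit_fetch(last_run: dict, max_fetch=FETCH_DEFAULT_LIMIT) -> int:
    # mirrors: len(last_run.get('ids', [])) + int(params.get('max_fetch', ...))
    return len(last_run.get("ids", [])) + int(max_fetch)

print(compute_limit_fetch({"ids": ["7", "8", "9"]}, max_fetch=10))  # 13
```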
2 changes: 1 addition & 1 deletion Packs/GenericSQL/Integrations/GenericSQL/GenericSQL.yml
@@ -214,7 +214,7 @@ script:
description: Running a sql query
execution: false
name: sql-command
-dockerimage: demisto/genericsql:1.1.0.61812
+dockerimage: demisto/genericsql:1.1.0.62758
feed: false
isfetch: true
longRunning: false
7 changes: 7 additions & 0 deletions Packs/GenericSQL/ReleaseNotes/1_0_23.md
@@ -0,0 +1,7 @@

#### Integrations

##### Generic SQL

- Added support for fetch logs.
- Updated the Docker image to: *demisto/genericsql:1.1.0.62758*.
2 changes: 1 addition & 1 deletion Packs/GenericSQL/pack_metadata.json
@@ -3,7 +3,7 @@
"description": "Connect and execute sql queries in 4 Databases: MySQL, PostgreSQL, Microsoft SQL Server and Oracle",
"support": "xsoar",
"serverMinVersion": "5.0.0",
"currentVersion": "1.0.22",
"currentVersion": "1.0.23",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
@@ -1035,7 +1035,7 @@ def standard_output(observable: Dict) -> Optional[Union[Common.Domain, Common.IP

Returns:
File, IP, URL or Domain object. If observable is not supported, will return None.
"""
""" # noqa: E501
file_keys = {
'fileHashType', 'fileHashValue', 'fileName', 'filePath', 'fileSize', 'fileType'
}
@@ -2770,7 +2770,7 @@ def get_file_data(file_response):
Returns:
dict. File's info
"""
-file_data = assign_params(**{
+file_data = assign_params(**{ # noqa: PIE804
'Sha1': file_response.get('sha1'),
'Size': file_response.get('size'),
'Sha256': file_response.get('sha256'),
@@ -3179,7 +3179,7 @@ def get_domain_statistics_context(domain_stat_response):
Returns:
(dict). domain statistics context
"""
-domain_statistics = assign_params(**{
+domain_statistics = assign_params(**{ # noqa: PIE804
"Host": domain_stat_response.get('host'),
"OrgPrevalence": domain_stat_response.get('orgPrevalence'),
"OrgFirstSeen": domain_stat_response.get('orgFirstSeen'),
@@ -3287,7 +3287,7 @@ def get_machine_data(machine):
Returns:
dict. Machine's info
"""
-machine_data = assign_params(**{
+machine_data = assign_params(**{ # noqa: PIE804
'ID': machine.get('id'),
'ComputerDNSName': machine.get('computerDnsName'),
'FirstSeen': machine.get('firstSeen'),
@@ -3338,7 +3338,7 @@ def get_file_statistics_context(file_stat_response):
Returns:
(dict). File statistics context
"""
-file_stat = assign_params(**{
+file_stat = assign_params(**{ # noqa: PIE804
"OrgPrevalence": file_stat_response.get('orgPrevalence'),
"OrgFirstSeen": file_stat_response.get('orgFirstSeen'),
"OrgLastSeen": file_stat_response.get('orgLastSeen'),
@@ -3398,7 +3398,7 @@ def get_ip_statistics_context(ip_statistics_response):
Returns:
(dict). IP statistics context
"""
-ip_statistics = assign_params(**{
+ip_statistics = assign_params(**{ # noqa: PIE804
"OrgPrevalence": ip_statistics_response.get('orgPrevalence'),
"OrgFirstSeen": ip_statistics_response.get('orgFirstSeen'),
"OrgLastSeen": ip_statistics_response.get('orgLastSeen')
@@ -3531,11 +3531,15 @@ def add_remove_machine_tag_command(client: MsClient, args: dict):


def fetch_incidents(client: MsClient, last_run, fetch_evidence):

demisto.debug("Microsoft-ATP - Start fetching")

first_fetch_time = dateparser.parse(client.alert_time_to_fetch,
settings={'RETURN_AS_TIMEZONE_AWARE': True, 'TIMEZONE': 'UTC'})
demisto.debug(f'First fetch time: {first_fetch_time}')

if last_run:
demisto.debug(f"Microsoft-ATP - Last run: {json.dumps(last_run)}")
last_fetch_time = last_run.get('last_alert_fetched_time')
last_fetch_time = datetime.strftime(parse_date_string(last_fetch_time) + timedelta(milliseconds=1), TIME_FORMAT)
# handling old version of time format:
@@ -3544,13 +3548,14 @@ def fetch_incidents(client: MsClient, last_run, fetch_evidence):

else:
last_fetch_time = datetime.strftime(first_fetch_time, TIME_FORMAT) # type: ignore
demisto.debug(f"Microsoft-ATP - Last run: {last_fetch_time}")

latest_created_time = dateparser.parse(last_fetch_time,
settings={'RETURN_AS_TIMEZONE_AWARE': True, 'TIMEZONE': 'UTC'})
demisto.debug(f'latest_created_time: {latest_created_time}')

params = _get_incidents_query_params(client, fetch_evidence, last_fetch_time)
-demisto.debug(f'getting alerts using {params=}')
+demisto.debug(f"Microsoft-ATP - Query sent to the server: {params}")
incidents = []
# get_alerts:
try:
@@ -3565,6 +3570,7 @@ def fetch_incidents(client: MsClient, last_run, fetch_evidence):
f'Try using a lower limit.')
demisto.debug(f'Query crashed API. Params sent to query: {params}')
raise err
+skipped_incidents = 0

for alert in alerts:
alert_time = dateparser.parse(alert['alertCreationTime'],
@@ -3574,8 +3580,9 @@ def fetch_incidents(client: MsClient, last_run, fetch_evidence):
parsed = dateparser.parse(last_fetch_time, settings={'RETURN_AS_TIMEZONE_AWARE': True, 'TIMEZONE': 'UTC'})
demisto.debug(f'Checking alert {alert["id"]} with parsed time {parsed}. last alert time is {alert_time}')
if alert_time <= parsed: # type: ignore
demisto.debug(f"{INTEGRATION_NAME} - alert {str(alert)} was created at {alert['alertCreationTime']}."
f' Skipping.')
skipped_incidents += 1
demisto.debug(f'Microsoft - ATP - Skipping incident with id={alert["id"]} with time {alert_time} because its'
' creation time is smaller than the last fetch.')
continue
demisto.debug(f'Adding alert {alert["id"]}')
incidents.append({
@@ -3591,7 +3598,10 @@ def fetch_incidents(client: MsClient, last_run, fetch_evidence):
latest_created_time = alert_time # type: ignore

# last alert is the newest as we ordered by it ascending
-demisto.debug(f'got {len(incidents)} incidents from the API.')
+demisto.debug(f'Microsoft-ATP - Next run after incidents fetching: {latest_created_time}')
+demisto.debug(f"Microsoft-ATP - Number of incidents before filtering: {len(alerts)}")
+demisto.debug(f"Microsoft-ATP - Number of incidents after filtering: {len(incidents)}")
+demisto.debug(f"Microsoft-ATP - Number of incidents skipped: {skipped_incidents}")
last_run['last_alert_fetched_time'] = datetime.strftime(latest_created_time, TIME_FORMAT) # type: ignore
return incidents, last_run
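The last_fetch_time handling near the top of this function nudges the stored timestamp forward by one millisecond so the newest already-ingested alert is not fetched again. A runnable sketch (TIME_FORMAT here is an assumed stand-in for the integration's constant):

```python
from datetime import datetime, timedelta

TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"  # assumed stand-in

def next_fetch_start(last_alert_time: str) -> str:
    # parse, bump by one millisecond, re-serialize
    bumped = datetime.strptime(last_alert_time, TIME_FORMAT) + timedelta(milliseconds=1)
    return bumped.strftime(TIME_FORMAT)

print(next_fetch_start("2023-06-11T10:00:00.000000Z"))  # 2023-06-11T10:00:00.001000Z
```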

@@ -3888,7 +3898,7 @@ def create_network_indicator_command(client, args) -> Tuple[str, Dict, Dict]:

Raises:
AssertionError: If no file arguments.
"""
""" # noqa: E501
network_object = assign_params(
domainName=args.get('domain_name'),
networkCidrBlock=args.get('network_cidr_block'),
@@ -6689,7 +6689,7 @@ script:
execution: false
name: microsoft-atp-generate-login-url
arguments: []
-dockerimage: demisto/crypto:1.0.0.61689
+dockerimage: demisto/crypto:1.0.0.62834
feed: false
isfetch: true
longRunning: false
@@ -0,0 +1,8 @@

#### Integrations

##### Microsoft Defender for Endpoint

- Added support for fetch logs.
- Updated the Docker image to: *demisto/crypto:1.0.0.62834*.

@@ -2,7 +2,7 @@
"name": "Microsoft Defender for Endpoint",
"description": "Microsoft Defender for Endpoint (previously Microsoft Defender Advanced Threat Protection (ATP)) is a unified platform for preventative protection, post-breach detection, automated investigation, and response.",
"support": "xsoar",
"currentVersion": "1.15.23",
"currentVersion": "1.15.24",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",