fix: stabilize E2E Vulnerability tests
Rebits committed Jan 17, 2024
1 parent 52b70be commit f5c96e2
Showing 12 changed files with 1,015 additions and 729 deletions.
9 changes: 9 additions & 0 deletions deps/wazuh_testing/wazuh_testing/end_to_end/logs.py
@@ -38,3 +38,12 @@ def truncate_remote_host_group_files(host_manager: HostManager, host_group: str,
log_file_path = ALERTS_JSON_PATH

host_manager.truncate_file(host, log_file_path)


def get_hosts_logs(host_manager: HostManager, host_group: str = 'all') -> dict:
host_logs = {}
for host in host_manager.get_group_hosts(host_group):
host_os_name = host_manager.get_host_variables(host)['os_name']
host_logs[host] = host_manager.get_file_content(host, logs_filepath_os[host_os_name])

return host_logs
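For context, a minimal usage sketch of the new helper, assuming a HostManager instance built elsewhere from the test inventory and that logs_filepath_os (used by this module) resolves each os_name to its Wazuh log path; the function name below is illustrative, not part of the commit:

import logging

from wazuh_testing.end_to_end.logs import get_hosts_logs
from wazuh_testing.tools.system import HostManager


def dump_agent_logs(host_manager: HostManager) -> None:
    # Collect the raw Wazuh log of every host in the 'agent' group
    # (the path is resolved per OS through logs_filepath_os) and emit it,
    # e.g. when a vulnerability check fails.
    agent_logs = get_hosts_logs(host_manager, host_group='agent')
    for host, content in agent_logs.items():
        logging.critical(f"--- {host} log ---\n{content}")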
42 changes: 30 additions & 12 deletions deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py
@@ -36,7 +36,7 @@ def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict
host_manager: An instance of the HostManager class containing information about hosts.
monitoring_data: A dictionary containing monitoring data for each host.
"""
def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int = 5,
def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int = 20,
ignore_error=False):
"""
Monitor the specified elements on a host.
@@ -50,15 +50,21 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements:
TimeoutError: If no match is found within the specified timeout.
"""
elements_not_found = []
elements_found = []

for element in monitoring_elements:
regex, timeout, monitoring_file = element['regex'], element['timeout'], element['file']
regex, timeout, monitoring_file, n_iterations = element['regex'], element['timeout'], element['file'], \
element['n_iterations']
current_timeout = 0
regex_match = None

while current_timeout < timeout:
file_content = host_manager.get_file_content(host, monitoring_file)
regex_match = re.search(regex, file_content)
if regex_match:

match_regex = re.findall(regex, file_content)
if match_regex and len(list(match_regex)) >= n_iterations:
elements_found = list(match_regex)
regex_match = True
break

sleep(scan_interval)
@@ -70,10 +76,16 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements:
if not ignore_error:
raise TimeoutError(f"Element not found: {element}")

host_elements_not_found = {}
host_elements_not_found[host] = elements_not_found
monitoring_result = {}

if host not in monitoring_result:
monitoring_result[host] = {}

monitoring_result[host]['not_found'] = elements_not_found

return host_elements_not_found
monitoring_result[host]['found'] = elements_found

return monitoring_result

with ThreadPoolExecutor() as executor:
futures = []
@@ -91,7 +103,8 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements:
return results
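To make the new contract concrete: each monitoring element now carries an n_iterations field, and the per-host result records both the matches collected by re.findall and the elements that timed out. The shapes below are illustrative, not copied from the suite:

monitoring_data = {
    'agent1': [{                                  # hypothetical inventory host name
        'regex': r'(\d{4}\/\d{2}\/\d{2} \d{2}:\d{2}:\d{2}) .*? INFO: Evaluation finished',
        'file': '/var/ossec/logs/ossec.log',
        'timeout': 80,
        'n_iterations': 2,                        # require at least two scan-end lines
    }]
}

# Expected per-host outcome when both matches arrive in time (illustrative):
# {'agent1': {'found': ['2024/01/17 10:00:01', '2024/01/17 10:07:12'], 'not_found': []}}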


def generate_monitoring_logs_all_agent(host_manager: HostManager, regex_list: list, timeout_list: list) -> dict:
def generate_monitoring_logs(host_manager: HostManager, regex_list: list, timeout_list: list, hosts: list,
n_iterations=1) -> dict:
"""
Generate monitoring data for logs on all agent hosts.
@@ -104,19 +117,21 @@ def generate_monitoring_logs_all_agent(host_manager: HostManager, regex_list: li
dict: Monitoring data for logs on all agent hosts.
"""
monitoring_data = {}
for agent in host_manager.get_group_hosts('agent'):
for agent in hosts:
monitoring_data[agent] = []
for index, regex_index in enumerate(regex_list):
os_name = host_manager.get_host_variables(agent)['os_name']
monitoring_data[agent].append({
'regex': regex_index,
'file': logs_filepath_os[os_name],
'timeout': timeout_list[index]
'timeout': timeout_list[index],
'n_iterations': n_iterations
})
return monitoring_data
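As a usage sketch, mirroring how the test module below drives the renamed helper; host_manager is assumed to exist already, and the timeout and n_iterations values are illustrative:

from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost
from wazuh_testing.end_to_end.regex import get_event_regex

monitoring_data = generate_monitoring_logs(
    host_manager,
    [get_event_regex({'event': 'syscollector_scan_start'}),
     get_event_regex({'event': 'syscollector_scan_end'})],
    [80, 80],                                # one timeout per regex, in seconds
    host_manager.get_group_hosts('agent'),
    n_iterations=1)

results = monitoring_events_multihost(host_manager, monitoring_data)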


def generate_monitoring_logs_manager(host_manager: HostManager, manager: str, regex: str, timeout: int) -> dict:
def generate_monitoring_logs_manager(host_manager: HostManager, manager: str, regex: str, timeout: int,
n_iterations: int = 1) -> dict:
"""
Generate monitoring data for logs on a specific manager host.
@@ -134,8 +149,10 @@ def generate_monitoring_logs_manager(host_manager: HostManager, manager: str, re
monitoring_data[manager] = [{
'regex': regex,
'file': logs_filepath_os[os_name],
'timeout': timeout
'timeout': timeout,
'n_iterations': n_iterations
}]

return monitoring_data
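And the single-manager variant; the target host name and the regex below are stand-ins for illustration, not values taken from the suite:

monitoring_data = generate_monitoring_logs_manager(
    host_manager,
    'manager1',                              # hypothetical inventory host name
    r'INFO: Vulnerability scan finished',    # stand-in pattern, not a verified log line
    timeout=120,
    n_iterations=1)

monitoring_events_multihost(host_manager, monitoring_data)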


@@ -165,6 +182,7 @@ def generate_monitoring_alerts_all_agent(host_manager: HostManager, events_metad
'regex': get_event_regex(event),
'file': '/var/ossec/logs/alerts/alerts.json',
'timeout': 120,
'n_iterations': 1
}
if 'parameters' in metadata_agent:
monitoring_element['parameters'] = metadata_agent['parameters']
4 changes: 2 additions & 2 deletions deps/wazuh_testing/wazuh_testing/end_to_end/regex.py
@@ -22,10 +22,10 @@

REGEX_PATTERNS = {
'syscollector_scan_start': {
'regex': '.*INFO: Starting evaluation.'
'regex': r'(\d{4}\/\d{2}\/\d{2} \d{2}:\d{2}:\d{2}) .*? INFO: Starting evaluation'
},
'syscollector_scan_end': {
'regex': '.*INFO: Evaluation finished.'
'regex': r'(\d{4}\/\d{2}\/\d{2} \d{2}:\d{2}:\d{2}) .*? INFO: Evaluation finished'
},
'syscollector_install_package_alert_yum': {
'regex': '.*installed.*agent".*"name":"(\\S+)".*Installed: (\\S+).*?(\\S+)',
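For reference, the anchored patterns now capture the scan timestamp, which is what lets monitoring_event count occurrences with re.findall and honour n_iterations. A self-contained check against made-up log lines (the wazuh-modulesd:syscollector tag is illustrative):

import re

SCAN_END = r'(\d{4}\/\d{2}\/\d{2} \d{2}:\d{2}:\d{2}) .*? INFO: Evaluation finished'

log_excerpt = (
    "2024/01/17 10:00:01 wazuh-modulesd:syscollector: INFO: Evaluation finished.\n"
    "2024/01/17 10:07:12 wazuh-modulesd:syscollector: INFO: Evaluation finished.\n"
)

timestamps = re.findall(SCAN_END, log_excerpt)
assert len(timestamps) >= 2    # the n_iterations-style check applied by monitoring_event
print(timestamps)              # ['2024/01/17 10:00:01', '2024/01/17 10:07:12']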
@@ -24,11 +24,12 @@
from typing import Dict, List
from multiprocessing.pool import ThreadPool
from datetime import datetime, timezone
import logging

from wazuh_testing.end_to_end.indexer_api import get_indexer_values
from wazuh_testing.tools.system import HostManager
from wazuh_testing.end_to_end.wazuh_api import get_agents_vulnerabilities
from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, monitoring_events_multihost
from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost
from wazuh_testing.end_to_end.waiters import wait_until_vuln_scan_agents_finished
from wazuh_testing.end_to_end.regex import get_event_regex
from wazuh_testing.end_to_end.logs import truncate_remote_host_group_files
@@ -48,18 +49,19 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man
Raises:
ValueError: If the specified operation is not recognized.
"""
logging.critical(f"Launching remote operation: {operation_data}")

host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0]
host_os_arch = host_manager.get_host_variables(host)['architecture']
system = host_manager.get_host_variables(host)['os_name']
operation = operation_data['operation']


print("Performing remote operations")

if system == 'linux':
system = host_manager.get_host_variables(host)['os'].split('_')[0]

if operation == 'install_package':
logging.critical(f"Installing package on {host}")

package_data = operation_data['package']
package_url = package_data[host_os_name][host_os_arch]

@@ -69,24 +71,33 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man
else:
host_manager.install_package(host, package_url, system)

logging.critical(f"Package installed on {host}")
logging.critical(f"Waiting for syscollector scan to finish on {host}")

TIMEOUT_SYSCOLLECTOR_SCAN = 80

TIMEOUT_SYSCOLLECTOR_SCAN = 60
truncate_remote_host_group_files(host_manager, 'agent', 'logs')

# Wait until syscollector
monitoring_data = generate_monitoring_logs_all_agent(host_manager,
[get_event_regex({'event': 'syscollector_scan_start'}),
get_event_regex({'event': 'syscollector_scan_end'})],
[TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN])
monitoring_data = generate_monitoring_logs(host_manager,
[get_event_regex({'event': 'syscollector_scan_start'}),
get_event_regex({'event': 'syscollector_scan_end'})],
[TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN],
host_manager.get_group_hosts('agent'))

monitoring_events_multihost(host_manager, monitoring_data)
result = monitoring_events_multihost(host_manager, monitoring_data)

logging.critical(f"Syscollector scan finished with result: {result}")

truncate_remote_host_group_files(host_manager, 'manager', 'logs')

logging.critical(f"Waiting for vulnerability scan to finish on {host}")

# Wait until VD scan
wait_until_vuln_scan_agents_finished(host_manager)

elif operation == 'remove_package':
logging.critical(f"Removing package on {host}")
package_data = operation_data['package']
package_name = package_data[host_os_name][host_os_arch]
host_manager.remove_package(host, package_name, system)
@@ -95,10 +106,10 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man

truncate_remote_host_group_files(host_manager, 'agent', 'logs')
# Wait until syscollector
monitoring_data = generate_monitoring_logs_all_agent(host_manager,
monitoring_data = generate_monitoring_logs(host_manager,
[get_event_regex({'event': 'syscollector_scan_start'}),
get_event_regex({'event': 'syscollector_scan_end'})],
[TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN])
[TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], host_manager.get_group_hosts('agent'))

monitoring_events_multihost(host_manager, monitoring_data)

@@ -108,11 +119,23 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man
wait_until_vuln_scan_agents_finished(host_manager)

elif operation == 'check_agent_vulnerability':
logging.critical(f"Checking agent vulnerability on {host}")

results = {
"alerts_not_found": [],
"states_not_found": []
}

if operation_data['parameters']['alert_indexed']:
check_vuln_alert_indexer(host_manager, operation_data['vulnerability_data'], current_datetime)
logging.critical(f'Checking vulnerability alerts in the indexer for {host}')
results["alerts_not_found"] = check_vuln_alert_indexer(host_manager, operation_data['vulnerability_data'], current_datetime)

if operation_data['parameters']['state_indice']:
check_vuln_state_index(host_manager, operation_data['vulnerability_data'], current_datetime)
logging.critical(f'Checking vulnerability state index for {host}')
results["states_not_found"] = check_vuln_state_index(host_manager, operation_data['vulnerability_data'], current_datetime)

assert len(results["alerts_not_found"]) == 0 and len(results["states_not_found"]) == 0, \
f"Vulnerability alerts or states not found for {host}: {results}"


def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict], host_manager: HostManager):
@@ -141,6 +164,7 @@ def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager)
host_manager (HostManager): An instance of the HostManager class containing information about hosts.
"""
for task in task_list:
logging.critical(f"Launching parallel task: {task}")
parallel_configuration = []
target = task['target']

