Skip to content

Commit

Permalink
Refactor and add kubelet tests
Browse files Browse the repository at this point in the history
  • Loading branch information
jeniawhite committed May 24, 2022
1 parent d0e12ab commit e17a451
Show file tree
Hide file tree
Showing 8 changed files with 1,614 additions and 4 deletions.
33 changes: 29 additions & 4 deletions tests/commonlib/io_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
import shutil
from pathlib import Path
from munch import Munch, munchify
import time


def get_logs_from_stream(stream: str) -> list[Munch]:
Expand Down Expand Up @@ -148,6 +147,32 @@ def edit_process_file(container_name: str, dictionary, resource: str):
with current_resource.open(mode="w") as f:
yaml.dump(r_file, f)

# Wait for process reboot
# TODO: Implement a more optimal way of waiting
time.sleep(60)
@staticmethod
def edit_config_file(container_name: str, dictionary, resource: str):
    """Apply set/unset edits to a YAML config file in place.

    @param container_name: Name of the container the file belongs to;
        an empty string is rejected as an unknown target.
    @param dictionary: Edit spec with two optional keys:
        "set"   - dict shallow-merged over the file's top-level mapping,
        "unset" - list of dotted key paths (e.g. "a.b.c") to delete.
    @param resource: Full path to the YAML file to edit.
    @raises Exception: If container_name is empty or the file is missing
        (e.g. the mount is absent).
    @return: None - the file is rewritten on disk.
    """
    if container_name == '':
        raise Exception(f"Unknown {container_name} is sent")

    current_resource = Path(resource)
    if not current_resource.is_file():
        raise Exception(
            f"File {resource} does not exist or mount missing.")

    with current_resource.open() as f:
        r_file = yaml.safe_load(f)

    set_dict = dictionary.get("set", {})
    unset_list = dictionary.get("unset", [])

    # Shallow merge: top-level keys from set_dict override the file's.
    r_file = {**r_file, **set_dict}

    for uskey in unset_list:
        # Walk the dotted path down to the parent of the leaf key.
        *parents, leaf = uskey.split('.')
        node = r_file
        for key in parents:
            # A non-dict intermediate value means the path does not
            # resolve; abandon this unset entry instead of crashing.
            if not isinstance(node, dict):
                node = None
                break
            node = node.get(key)
        # pop(..., None) tolerates an already-absent leaf key, which
        # previously raised KeyError via a bare `del`.
        if isinstance(node, dict):
            node.pop(leaf, None)

    with current_resource.open(mode="w") as f:
        yaml.dump(r_file, f)
62 changes: 62 additions & 0 deletions tests/product/tests/test_process_api_server_rules.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
"""
Kubernetes CIS rules verification.
This module verifies correctness of retrieved findings by manipulating audit and remediation actions
"""
from datetime import datetime

import pytest
import time

from commonlib.utils import get_evaluation
from product.tests.tests.process.process_test_cases import *


@pytest.mark.rules
@pytest.mark.parametrize(
    ("rule_tag", "dictionary", "resource", "expected"),
    api_server_rules,
)
def test_process_api_server(config_node_pre_test,
                            rule_tag,
                            dictionary,
                            resource,
                            expected):
    """
    This data driven test verifies rules and findings returned by cloudbeat agent.
    In order to add new cases the @pytest.mark.parametrize section shall be updated.
    Setup and teardown actions are defined in the data method.
    This test creates a cloudbeat agent instance, changes node resources (modes, users, groups)
    and verifies that cloudbeat returns the correct finding.
    @param rule_tag: Name of rule to be verified.
    @param dictionary: Set and Unset dictionary
    @param resource: Full path to resource / file
    @param expected: Result to be found in finding evaluation field.
    @return: None - Test Pass / Fail result is generated.
    """
    k8s_client, api_client, cloudbeat_agent = config_node_pre_test

    # Only containerized api_client implementations expose process-file editing.
    if not hasattr(api_client, "edit_process_file"):
        pytest.skip("skipping process rules run in non-containerized api_client")

    # Currently, single node is used, in the future may be extended for all nodes.
    node = k8s_client.get_cluster_nodes()[0]
    pods = k8s_client.get_agent_pod_instances(agent_name=cloudbeat_agent.name,
                                              namespace=cloudbeat_agent.namespace)

    api_client.edit_process_file(container_name=node.metadata.name,
                                 dictionary=dictionary,
                                 resource=resource)

    # Wait for process reboot
    # TODO: Implement a more optimal way of waiting
    time.sleep(60)

    evaluation = get_evaluation(
        k8s=k8s_client,
        timeout=cloudbeat_agent.findings_timeout,
        pod_name=pods[0].metadata.name,
        namespace=cloudbeat_agent.namespace,
        rule_tag=rule_tag,
        exec_timestamp=datetime.utcnow()
    )

    assert evaluation == expected, f"Rule {rule_tag} verification failed."
62 changes: 62 additions & 0 deletions tests/product/tests/test_process_controller_manager_rules.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
"""
Kubernetes CIS rules verification.
This module verifies correctness of retrieved findings by manipulating audit and remediation actions
"""
from datetime import datetime

import pytest
import time

from commonlib.utils import get_evaluation
from product.tests.tests.process.process_test_cases import *


@pytest.mark.rules
@pytest.mark.parametrize(
    ("rule_tag", "dictionary", "resource", "expected"),
    controller_manager_rules,
)
def test_process_controller_manager(config_node_pre_test,
                                    rule_tag,
                                    dictionary,
                                    resource,
                                    expected):
    """
    This data driven test verifies rules and findings returned by cloudbeat agent.
    In order to add new cases the @pytest.mark.parametrize section shall be updated.
    Setup and teardown actions are defined in the data method.
    This test creates a cloudbeat agent instance, changes node resources (modes, users, groups)
    and verifies that cloudbeat returns the correct finding.
    @param rule_tag: Name of rule to be verified.
    @param dictionary: Set and Unset dictionary
    @param resource: Full path to resource / file
    @param expected: Result to be found in finding evaluation field.
    @return: None - Test Pass / Fail result is generated.
    """
    k8s_client, api_client, cloudbeat_agent = config_node_pre_test

    # Only containerized api_client implementations expose process-file editing.
    if not hasattr(api_client, "edit_process_file"):
        pytest.skip("skipping process rules run in non-containerized api_client")

    # Currently, single node is used, in the future may be extended for all nodes.
    node = k8s_client.get_cluster_nodes()[0]
    pods = k8s_client.get_agent_pod_instances(agent_name=cloudbeat_agent.name,
                                              namespace=cloudbeat_agent.namespace)

    api_client.edit_process_file(container_name=node.metadata.name,
                                 dictionary=dictionary,
                                 resource=resource)

    # Wait for process reboot
    # TODO: Implement a more optimal way of waiting
    time.sleep(60)

    evaluation = get_evaluation(
        k8s=k8s_client,
        timeout=cloudbeat_agent.findings_timeout,
        pod_name=pods[0].metadata.name,
        namespace=cloudbeat_agent.namespace,
        rule_tag=rule_tag,
        exec_timestamp=datetime.utcnow()
    )

    assert evaluation == expected, f"Rule {rule_tag} verification failed."
62 changes: 62 additions & 0 deletions tests/product/tests/test_process_etcd_rules.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
"""
Kubernetes CIS rules verification.
This module verifies correctness of retrieved findings by manipulating audit and remediation actions
"""
from datetime import datetime

import pytest
import time

from commonlib.utils import get_evaluation
from product.tests.tests.process.process_test_cases import *


@pytest.mark.rules
@pytest.mark.parametrize(
    ("rule_tag", "dictionary", "resource", "expected"),
    etcd_rules,
)
def test_process_etcd(config_node_pre_test,
                      rule_tag,
                      dictionary,
                      resource,
                      expected):
    """
    This data driven test verifies rules and findings returned by cloudbeat agent.
    In order to add new cases the @pytest.mark.parametrize section shall be updated.
    Setup and teardown actions are defined in the data method.
    This test creates a cloudbeat agent instance, changes node resources (modes, users, groups)
    and verifies that cloudbeat returns the correct finding.
    @param rule_tag: Name of rule to be verified.
    @param dictionary: Set and Unset dictionary
    @param resource: Full path to resource / file
    @param expected: Result to be found in finding evaluation field.
    @return: None - Test Pass / Fail result is generated.
    """
    k8s_client, api_client, cloudbeat_agent = config_node_pre_test

    # Only containerized api_client implementations expose process-file editing.
    if not hasattr(api_client, "edit_process_file"):
        pytest.skip("skipping process rules run in non-containerized api_client")

    # Currently, single node is used, in the future may be extended for all nodes.
    node = k8s_client.get_cluster_nodes()[0]
    pods = k8s_client.get_agent_pod_instances(agent_name=cloudbeat_agent.name,
                                              namespace=cloudbeat_agent.namespace)

    api_client.edit_process_file(container_name=node.metadata.name,
                                 dictionary=dictionary,
                                 resource=resource)

    # Wait for process reboot
    # TODO: Implement a more optimal way of waiting
    time.sleep(60)

    evaluation = get_evaluation(
        k8s=k8s_client,
        timeout=cloudbeat_agent.findings_timeout,
        pod_name=pods[0].metadata.name,
        namespace=cloudbeat_agent.namespace,
        rule_tag=rule_tag,
        exec_timestamp=datetime.utcnow()
    )

    assert evaluation == expected, f"Rule {rule_tag} verification failed."
62 changes: 62 additions & 0 deletions tests/product/tests/test_process_kubelet_rules.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
"""
Kubernetes CIS rules verification.
This module verifies correctness of retrieved findings by manipulating audit and remediation actions
"""
from datetime import datetime

import pytest
import time

from commonlib.utils import get_evaluation
from product.tests.tests.process.process_test_cases import *


@pytest.mark.rules
@pytest.mark.parametrize(
    ("rule_tag", "dictionary", "resource", "expected"),
    kubelet_rules,
)
def test_process_kubelet(config_node_pre_test,
                         rule_tag,
                         dictionary,
                         resource,
                         expected):
    """
    This data driven test verifies rules and findings returned by cloudbeat agent.
    In order to add new cases the @pytest.mark.parametrize section shall be updated.
    Setup and teardown actions are defined in the data method.
    This test creates a cloudbeat agent instance, changes node resources (modes, users, groups)
    and verifies that cloudbeat returns the correct finding.
    @param rule_tag: Name of rule to be verified.
    @param dictionary: Set and Unset dictionary
    @param resource: Full path to resource / file
    @param expected: Result to be found in finding evaluation field.
    @return: None - Test Pass / Fail result is generated.
    """
    k8s_client, api_client, cloudbeat_agent = config_node_pre_test

    # Only containerized api_client implementations expose config-file editing.
    if not hasattr(api_client, "edit_config_file"):
        pytest.skip("skipping process rules run in non-containerized api_client")

    # Currently, single node is used, in the future may be extended for all nodes.
    node = k8s_client.get_cluster_nodes()[0]
    pods = k8s_client.get_agent_pod_instances(agent_name=cloudbeat_agent.name,
                                              namespace=cloudbeat_agent.namespace)

    api_client.edit_config_file(container_name=node.metadata.name,
                                dictionary=dictionary,
                                resource=resource)

    # Wait for updated file fetch
    # TODO: Implement a more optimal way of waiting
    time.sleep(60)

    evaluation = get_evaluation(
        k8s=k8s_client,
        timeout=cloudbeat_agent.findings_timeout,
        pod_name=pods[0].metadata.name,
        namespace=cloudbeat_agent.namespace,
        rule_tag=rule_tag,
        exec_timestamp=datetime.utcnow()
    )

    assert evaluation == expected, f"Rule {rule_tag} verification failed."
62 changes: 62 additions & 0 deletions tests/product/tests/test_process_scheduler_rules.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
"""
Kubernetes CIS rules verification.
This module verifies correctness of retrieved findings by manipulating audit and remediation actions
"""
from datetime import datetime

import pytest
import time

from commonlib.utils import get_evaluation
from product.tests.tests.process.process_test_cases import *


@pytest.mark.rules
@pytest.mark.parametrize(
    ("rule_tag", "dictionary", "resource", "expected"),
    scheduler_rules,
)
def test_process_scheduler(config_node_pre_test,
                           rule_tag,
                           dictionary,
                           resource,
                           expected):
    """
    This data driven test verifies rules and findings returned by cloudbeat agent.
    In order to add new cases the @pytest.mark.parametrize section shall be updated.
    Setup and teardown actions are defined in the data method.
    This test creates a cloudbeat agent instance, changes node resources (modes, users, groups)
    and verifies that cloudbeat returns the correct finding.
    @param rule_tag: Name of rule to be verified.
    @param dictionary: Set and Unset dictionary
    @param resource: Full path to resource / file
    @param expected: Result to be found in finding evaluation field.
    @return: None - Test Pass / Fail result is generated.
    """
    k8s_client, api_client, cloudbeat_agent = config_node_pre_test

    # Only containerized api_client implementations expose process-file editing.
    if not hasattr(api_client, "edit_process_file"):
        pytest.skip("skipping process rules run in non-containerized api_client")

    # Currently, single node is used, in the future may be extended for all nodes.
    node = k8s_client.get_cluster_nodes()[0]
    pods = k8s_client.get_agent_pod_instances(agent_name=cloudbeat_agent.name,
                                              namespace=cloudbeat_agent.namespace)

    api_client.edit_process_file(container_name=node.metadata.name,
                                 dictionary=dictionary,
                                 resource=resource)

    # Wait for process reboot
    # TODO: Implement a more optimal way of waiting
    time.sleep(60)

    evaluation = get_evaluation(
        k8s=k8s_client,
        timeout=cloudbeat_agent.findings_timeout,
        pod_name=pods[0].metadata.name,
        namespace=cloudbeat_agent.namespace,
        rule_tag=rule_tag,
        exec_timestamp=datetime.utcnow()
    )

    assert evaluation == expected, f"Rule {rule_tag} verification failed."
Empty file.
Loading

0 comments on commit e17a451

Please sign in to comment.