Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions tests/integration-tests/configs/common/common.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -409,6 +409,16 @@ storage:
instances: {{ common.INSTANCES_DEFAULT_X86 }}
oss: {{ common.OSS_BATCH }}
schedulers: ["awsbatch"]
test_efs.py::test_existing_efs:
dimensions:
- regions: ["ap-northeast-2"]
instances: {{ common.INSTANCES_DEFAULT_X86 }}
oss: ["alinux2"]
schedulers: ["awsbatch"]
- regions: ["ap-northeast-2"]
instances: {{ common.INSTANCES_DEFAULT_X86 }}
oss: ["centos8"]
schedulers: ["slurm"]
test_raid.py::test_raid_fault_tolerance_mode:
dimensions:
- regions: ["cn-northwest-1"]
Expand Down
15 changes: 6 additions & 9 deletions tests/integration-tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
from utils import (
create_s3_bucket,
delete_s3_bucket,
generate_stack_name,
get_architecture_supported_by_instance_type,
get_vpc_snakecase_value,
random_alphanumeric,
Expand Down Expand Up @@ -329,7 +330,7 @@ def test_datadir(request, datadir):


@pytest.fixture()
def pcluster_config_reader(test_datadir, vpc_stacks, region, request):
def pcluster_config_reader(test_datadir, vpc_stack, region, request):
"""
Define a fixture to render pcluster config templates associated to the running test.

Expand All @@ -349,7 +350,7 @@ def _config_renderer(config_file="pcluster.config.ini", **kwargs):
config_file_path = test_datadir / config_file
if not os.path.isfile(config_file_path):
raise FileNotFoundError(f"Cluster config file not found in the expected dir {config_file_path}")
default_values = _get_default_template_values(vpc_stacks, region, request)
default_values = _get_default_template_values(vpc_stack, request)
file_loader = FileSystemLoader(str(test_datadir))
env = Environment(loader=file_loader)
rendered_template = env.get_template(config_file).render(**{**kwargs, **default_values})
Expand Down Expand Up @@ -441,9 +442,9 @@ def _enable_sanity_check_if_unset(cluster_config):
config.write(f)


def _get_default_template_values(vpc_stacks, region, request):
def _get_default_template_values(vpc_stack, request):
"""Build a dictionary of default values to inject in the jinja templated cluster configs."""
default_values = get_vpc_snakecase_value(region, vpc_stacks)
default_values = get_vpc_snakecase_value(vpc_stack)
default_values.update({dimension: request.node.funcargs.get(dimension) for dimension in DIMENSIONS_MARKER_ARGS})
default_values["key_name"] = request.config.getoption("key_name")

Expand Down Expand Up @@ -635,11 +636,7 @@ def _create_vpc_stack(request, template, region, cfn_stacks_factory):
stack = CfnStack(name=request.config.getoption("vpc_stack"), region=region, template=template.to_json())
else:
stack = CfnStack(
name="integ-tests-vpc-{0}{1}{2}".format(
random_alphanumeric(),
"-" if request.config.getoption("stackname_suffix") else "",
request.config.getoption("stackname_suffix"),
),
name=generate_stack_name("integ-tests-vpc", request.config.getoption("stackname_suffix")),
region=region,
template=template.to_json(),
)
Expand Down
8 changes: 2 additions & 6 deletions tests/integration-tests/tests/networking/test_networking.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
import pytest
from assertpy import assert_that
from cfn_stacks_factory import CfnStack, CfnStacksFactory
from utils import random_alphanumeric
from utils import generate_stack_name


@pytest.fixture()
Expand All @@ -26,11 +26,7 @@ def networking_stack_factory(request):
def _create_network(region, template_path, parameters):
file_content = extract_template(template_path)
stack = CfnStack(
name="integ-tests-networking-{0}{1}{2}".format(
random_alphanumeric(),
"-" if request.config.getoption("stackname_suffix") else "",
request.config.getoption("stackname_suffix"),
),
name=generate_stack_name("integ-tests-networking", request.config.getoption("stackname_suffix")),
region=region,
template=file_content,
parameters=parameters,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from cfn_stacks_factory import CfnStack
from troposphere import Ref, Template
from troposphere.ec2 import SecurityGroup, SecurityGroupIngress
from utils import check_headnode_security_group, random_alphanumeric
from utils import check_headnode_security_group, generate_stack_name


@pytest.mark.usefixtures("os", "scheduler", "instance")
Expand Down Expand Up @@ -118,11 +118,7 @@ def custom_security_group(vpc_stack, region, request, cfn_stacks_factory):
)
)
stack = CfnStack(
name="integ-tests-custom-sg-{0}{1}{2}".format(
random_alphanumeric(),
"-" if request.config.getoption("stackname_suffix") else "",
request.config.getoption("stackname_suffix"),
),
name=generate_stack_name("integ-tests-custom-sg", request.config.getoption("stackname_suffix")),
region=region,
template=template.to_json(),
)
Expand Down
148 changes: 141 additions & 7 deletions tests/integration-tests/tests/storage/test_efs.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,15 @@
import boto3
import pytest
from assertpy import assert_that
from cfn_stacks_factory import CfnStack
from remote_command_executor import RemoteCommandExecutor
from utils import get_vpc_snakecase_value
from troposphere import Base64, Sub, Template
from troposphere.ec2 import Instance
from troposphere.efs import FileSystem, MountTarget
from utils import generate_stack_name, get_vpc_snakecase_value, random_alphanumeric

from tests.common.schedulers_common import get_scheduler_commands
from tests.common.utils import retrieve_latest_ami
from tests.storage.storage_common import verify_directory_correctly_shared


Expand All @@ -28,13 +33,13 @@
@pytest.mark.schedulers(["slurm", "awsbatch"])
@pytest.mark.oss(["alinux2"])
@pytest.mark.usefixtures("region", "os", "instance")
def test_efs_compute_az(region, scheduler, pcluster_config_reader, clusters_factory, vpc_stacks):
def test_efs_compute_az(region, scheduler, pcluster_config_reader, clusters_factory, vpc_stack):
"""
Test when compute subnet is in a different AZ from master subnet.

A compute mount target should be created and the efs correctly mounted on compute.
"""
_assert_subnet_az_relations(region, vpc_stacks, expected_in_same_az=False)
_assert_subnet_az_relations(region, vpc_stack, expected_in_same_az=False)
mount_dir = "efs_mount_dir"
cluster_config = pcluster_config_reader(mount_dir=mount_dir)
cluster = clusters_factory(cluster_config)
Expand All @@ -50,13 +55,13 @@ def test_efs_compute_az(region, scheduler, pcluster_config_reader, clusters_fact
@pytest.mark.instances(["c4.xlarge", "c5.xlarge"])
@pytest.mark.schedulers(["slurm", "awsbatch"])
@pytest.mark.usefixtures("region", "os", "instance")
def test_efs_same_az(region, scheduler, pcluster_config_reader, clusters_factory, vpc_stacks):
def test_efs_same_az(region, scheduler, pcluster_config_reader, clusters_factory, vpc_stack):
"""
Test when compute subnet is in the same AZ as master subnet.

No compute mount point needed and the efs correctly mounted on compute.
"""
_assert_subnet_az_relations(region, vpc_stacks, expected_in_same_az=True)
_assert_subnet_az_relations(region, vpc_stack, expected_in_same_az=True)
mount_dir = "efs_mount_dir"
cluster_config = pcluster_config_reader(mount_dir=mount_dir)
cluster = clusters_factory(cluster_config)
Expand All @@ -68,6 +73,135 @@ def test_efs_same_az(region, scheduler, pcluster_config_reader, clusters_factory
_test_efs_correctly_shared(remote_command_executor, mount_dir, scheduler_commands)


@pytest.mark.usefixtures("os", "instance")
def test_existing_efs(
    region,
    scheduler,
    efs_stack,
    pcluster_config_reader,
    clusters_factory,
    vpc_stack,
    request,
    key_name,
    cfn_stacks_factory,
):
    """
    Test that when efs_fs_id is provided in the config file, the existing EFS is correctly mounted.

    To verify that the mounted EFS is the pre-existing one (and not a freshly created file system),
    the test seeds the EFS with a randomly named file before cluster creation and then expects to
    find that file inside the mount dir on the cluster.
    """
    # Seed the EFS with a uniquely named file before the cluster mounts it.
    file_name = _write_file_into_efs(region, vpc_stack, efs_stack, request, key_name, cfn_stacks_factory)

    _assert_subnet_az_relations(region, vpc_stack, expected_in_same_az=False)
    mount_dir = "/efs_mount_dir"
    cluster_config = pcluster_config_reader(
        mount_dir=mount_dir, efs_fs_id=efs_stack.cfn_resources["FileSystemResource"]
    )
    cluster = clusters_factory(cluster_config)
    remote_command_executor = RemoteCommandExecutor(cluster)

    # The mount dir must show up in df output, i.e. the EFS is mounted on the head node.
    logging.info("Testing efs {0} is correctly mounted".format(mount_dir))
    result = remote_command_executor.run_remote_command("df | grep '{0}'".format(mount_dir))
    assert_that(result.stdout).contains(mount_dir)

    # The seeded file must be readable: cat exits non-zero (failing the command) if the file
    # is missing, which proves the cluster mounted the pre-existing file system.
    remote_command_executor.run_remote_command(f"cat {mount_dir}/{file_name}")

    scheduler_commands = get_scheduler_commands(scheduler, remote_command_executor)
    _test_efs_correctly_mounted(remote_command_executor, mount_dir)
    _test_efs_correctly_shared(remote_command_executor, mount_dir, scheduler_commands)

@pytest.fixture(scope="class")
def efs_stack(cfn_stacks_factory, request, region):
    """Class-scoped fixture deploying a CFN stack that contains a single EFS resource."""
    template = Template()
    template.set_version("2010-09-09")
    template.set_description("EFS stack created for testing existing EFS")
    template.add_resource(FileSystem("FileSystemResource"))

    stack_name = generate_stack_name("integ-tests-efs", request.config.getoption("stackname_suffix"))
    stack = CfnStack(name=stack_name, region=region, template=template.to_json())
    cfn_stacks_factory.create_stack(stack)

    yield stack

    # Tear the stack down after the tests unless the user asked to keep resources around.
    if not request.config.getoption("no_delete"):
        cfn_stacks_factory.delete_stack(stack.name, region)


def _write_file_into_efs(region, vpc_stack, efs_stack, request, key_name, cfn_stacks_factory):
    """
    Write an empty file with a random name into the EFS and return that file name.

    Deploys a throwaway stack containing a mount target and an instance whose user data mounts
    the EFS, touches the file, and signals CloudFormation. The stack is deleted once the
    instance has signaled (i.e. the file has been written), since only the file needs to persist.
    """
    write_file_template = Template()
    write_file_template.set_version("2010-09-09")
    write_file_template.set_description("Stack to write a file to the existing EFS")
    # Use the VPC's default security group so the writer instance can reach the EFS mount
    # target (the default group allows all traffic between its members).
    default_security_group_id = (
        boto3.client("ec2", region_name=region)
        .describe_security_groups(
            Filters=[
                {"Name": "vpc-id", "Values": [vpc_stack.cfn_outputs["VpcId"]]},
                {"Name": "group-name", "Values": ["default"]},
            ]
        )
        .get("SecurityGroups")[0]
        .get("GroupId")
    )
    write_file_template.add_resource(
        MountTarget(
            "MountTargetResource",
            FileSystemId=efs_stack.cfn_resources["FileSystemResource"],
            SubnetId=vpc_stack.cfn_outputs["PublicSubnetId"],
            SecurityGroups=[default_security_group_id],
        )
    )
    random_file_name = random_alphanumeric()
    # NOTE: the user data goes through Fn::Sub, so literal shell variables are escaped as
    # ${!var} while ${AWS::StackName}/${AWS::Region} are substituted by CloudFormation.
    # cfn-signal must be invoked with its absolute path (/opt/aws/bin/cfn-signal): runcmd
    # does not run from /, so a relative path would fail and the CreationPolicy would time out.
    user_data = (
        """
        #cloud-config
        package_update: true
        package_upgrade: true
        runcmd:
        - yum install -y amazon-efs-utils
        - yum install -y nfs-utils
        - file_system_id_1="""
        + efs_stack.cfn_resources["FileSystemResource"]
        + """
        - efs_mount_point_1=/mnt/efs/fs1
        - mkdir -p "${!efs_mount_point_1}"
        - mount -t efs ${!file_system_id_1}:/ ${!efs_mount_point_1}
        - touch ${!efs_mount_point_1}/"""
        + random_file_name
        + """
        - umount ${!efs_mount_point_1}
        - /opt/aws/bin/cfn-signal -e $? --stack ${AWS::StackName} --resource InstanceToWriteEFS --region ${AWS::Region}
        """
    )
    write_file_template.add_resource(
        Instance(
            "InstanceToWriteEFS",
            # Wait up to 10 minutes for the instance to report that the file was written.
            CreationPolicy={"ResourceSignal": {"Timeout": "PT10M"}},
            ImageId=retrieve_latest_ami(region, "alinux2"),
            InstanceType="c5.xlarge",
            SubnetId=vpc_stack.cfn_outputs["PublicSubnetId"],
            UserData=Base64(Sub(user_data)),
            KeyName=key_name,
            DependsOn=["MountTargetResource"],
        )
    )
    write_file_stack = CfnStack(
        name=generate_stack_name("integ-tests-efs-write-file", request.config.getoption("stackname_suffix")),
        region=region,
        template=write_file_template.to_json(),
    )
    cfn_stacks_factory.create_stack(write_file_stack)

    # The writer stack is no longer needed once the file exists on the EFS.
    cfn_stacks_factory.delete_stack(write_file_stack.name, region)

    return random_file_name


def _test_efs_correctly_shared(remote_command_executor, mount_dir, scheduler_commands):
    """Verify that the EFS mount dir is visible and writable from the compute nodes."""
    logging.info("Testing efs correctly mounted on compute nodes")
    verify_directory_correctly_shared(remote_command_executor, mount_dir, scheduler_commands)
Expand All @@ -87,8 +221,8 @@ def _test_efs_correctly_mounted(remote_command_executor, mount_dir):
)


def _assert_subnet_az_relations(region, vpc_stacks, expected_in_same_az):
vpc = get_vpc_snakecase_value(region, vpc_stacks)
def _assert_subnet_az_relations(region, vpc_stack, expected_in_same_az):
vpc = get_vpc_snakecase_value(vpc_stack)
master_subnet_id = vpc["public_subnet_id"]
compute_subnet_id = vpc["private_subnet_id"] if expected_in_same_az else vpc["private_additional_cidr_subnet_id"]
master_subnet_az = boto3.resource("ec2", region_name=region).Subnet(master_subnet_id).availability_zone
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
[global]
cluster_template = default

[aws]
aws_region_name = {{ region }}

[cluster default]
base_os = {{ os }}
key_name = {{ key_name }}
vpc_settings = parallelcluster-vpc
scheduler = {{ scheduler }}
master_instance_type = {{ instance }}
compute_instance_type = {{ instance }}
{% if scheduler == "awsbatch" %}
min_vcpus = 4
desired_vcpus = 4
{% else %}
initial_queue_size = 1
maintain_initial_size = true
{% endif %}
efs_settings = efs

[vpc parallelcluster-vpc]
vpc_id = {{ vpc_id }}
master_subnet_id = {{ public_subnet_id }}
# This compute subnet would be in a different AZ than master for regions defined in AVAILABILITY_ZONE_OVERRIDES
# See conftest for details
compute_subnet_id = {{ private_additional_cidr_subnet_id }}
use_public_ips = false

[efs efs]
efs_fs_id = {{ efs_fs_id }}
shared_dir = {{ mount_dir }}
14 changes: 11 additions & 3 deletions tests/integration-tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,15 @@ def run_command(command, capture_output=True, log_error=True, env=None, timeout=
return result


def generate_stack_name(prefix, suffix):
    """Generate a stack name with prefix, suffix, and a random string in the middle"""
    # Omit the separator entirely when no suffix is configured.
    separator = "-" if suffix else ""
    return f"{prefix}-{random_alphanumeric()}{separator}{suffix}"


def random_alphanumeric(size=16):
    """Generate a random alphanumeric string."""
    # Lowercase letters plus digits, sampled with replacement.
    alphabet = string.ascii_lowercase + string.digits
    return "".join(random.choices(alphabet, k=size))
Expand Down Expand Up @@ -304,11 +313,10 @@ def paginate_boto3(method, **kwargs):
yield result


def get_vpc_snakecase_value(vpc_stack):
    """
    Return dict containing snakecase vpc variables.

    Maps each CloudFormation output key of the given VPC stack (e.g. "PublicSubnetId")
    to its value under a snake_case key (e.g. "public_subnet_id").
    """
    return {to_snake_case(key): value for key, value in vpc_stack.cfn_outputs.items()}

Expand Down