diff --git a/.buildkite/common.py b/.buildkite/common.py index c33acee9b49..7119b79ee76 100644 --- a/.buildkite/common.py +++ b/.buildkite/common.py @@ -13,14 +13,14 @@ import subprocess from pathlib import Path -DEFAULT_INSTANCES = [ - "c5n.metal", # Intel Skylake - "m5n.metal", # Intel Cascade Lake - "m6i.metal", # Intel Icelake - "m6a.metal", # AMD Milan - "m6g.metal", # Graviton2 - "m7g.metal", # Graviton3 -] +DEFAULT_INSTANCES = { + "c5n.metal": "x86_64", # Intel Skylake + "m5n.metal": "x86_64", # Intel Cascade Lake + "m6i.metal": "x86_64", # Intel Icelake + "m6a.metal": "x86_64", # AMD Milan + "m6g.metal": "aarch64", # Graviton2 + "m7g.metal": "aarch64", # Graviton3 +} DEFAULT_PLATFORMS = [ ("al2", "linux_5.10"), @@ -146,7 +146,7 @@ def __call__(self, parser, namespace, value, option_string=None): "--instances", required=False, nargs="+", - default=DEFAULT_INSTANCES, + default=DEFAULT_INSTANCES.keys(), ) COMMON_PARSER.add_argument( "--platforms", @@ -233,7 +233,7 @@ class BKPipeline: parser = COMMON_PARSER - def __init__(self, initial_steps=None, with_build_step=True, **kwargs): + def __init__(self, with_build_step=True, **kwargs): self.steps = [] self.args = args = self.parser.parse_args() # Retry one time if agent was lost. This can happen if we terminate the @@ -258,33 +258,22 @@ def __init__(self, initial_steps=None, with_build_step=True, **kwargs): # Build sharing if with_build_step: build_cmds, self.shared_build = shared_build() - step_build = group("đŸ—ī¸ Build", build_cmds, **self.per_arch) - self.steps += [step_build, "wait"] + self.build_group_per_arch( + "đŸ—ī¸ Build", build_cmds, depends_on_build=False, key_prefix="build" + ) else: self.shared_build = None - # If we run initial_steps before the "wait" step above, then a failure of the initial steps - # would result in the build not progressing past the "wait" step (as buildkite only proceeds past a wait step - # if everything before it passed). 
Thus put the initial steps after the "wait" step, but set `"depends_on": null` - # to start running them immediately (e.g. without waiting for the "wait" step to unblock). - # - # See also https://buildkite.com/docs/pipelines/dependencies#explicit-dependencies-in-uploaded-steps - if initial_steps: - for step in initial_steps: - step["depends_on"] = None - - self.steps += initial_steps - - def add_step(self, step, decorate=True): + def add_step(self, step, depends_on_build=True): """ Add a step to the pipeline. https://buildkite.com/docs/pipelines/step-reference :param step: a Buildkite step - :param decorate: inject needed commands for sharing builds + :param depends_on_build: inject needed commands for sharing builds """ - if decorate and isinstance(step, dict): + if depends_on_build and isinstance(step, dict): step = self._adapt_group(step) self.steps.append(step) return step @@ -307,6 +296,10 @@ def _adapt_group(self, group): for step in group["steps"]: step["command"] = prepend + step["command"] + if self.shared_build is not None: + step["depends_on"] = ( + "build_" + DEFAULT_INSTANCES[step["agents"]["instance"]] + ) return group def build_group(self, *args, **kwargs): @@ -315,16 +308,30 @@ def build_group(self, *args, **kwargs): https://buildkite.com/docs/pipelines/group-step """ - decorate = kwargs.pop("decorate", True) + depends_on_build = kwargs.pop("depends_on_build", True) combined = overlay_dict(self.per_instance, kwargs) - return self.add_step(group(*args, **combined), decorate=decorate) + return self.add_step( + group(*args, **combined), depends_on_build=depends_on_build + ) - def build_group_per_arch(self, *args, **kwargs): + def build_group_per_arch(self, label, *args, **kwargs): """ Build a group, parametrizing over the architectures only. 
+ + kwargs consumed by this method and not passed down to `group`: + - `depends_on_build` (default: `True`): Whether the steps in this group depend on the artifacts from the shared compilation steps + - `key_prefix`: If set, causes the generated steps to have a "key" field set to f"{key_prefix}_{$ARCH}". """ + depends_on_build = kwargs.pop("depends_on_build", True) + key_prefix = kwargs.pop("key_prefix", None) combined = overlay_dict(self.per_arch, kwargs) - return self.add_step(group(*args, **combined)) + grp = group(label, *args, **combined) + if key_prefix: + for step in grp["steps"]: + step["key"] = ( + key_prefix + "_" + DEFAULT_INSTANCES[step["agents"]["instance"]] + ) + return self.add_step(grp, depends_on_build=depends_on_build) def to_dict(self): """Render the pipeline as a dictionary.""" diff --git a/.buildkite/pipeline_cpu_template.py b/.buildkite/pipeline_cpu_template.py index 81497a51e3c..2a182ed6e4b 100755 --- a/.buildkite/pipeline_cpu_template.py +++ b/.buildkite/pipeline_cpu_template.py @@ -34,7 +34,7 @@ class BkStep(str, Enum): "tools/devtool -y test --no-build -- -m no_block_pr integration_tests/functional/test_cpu_template_helper.py -k test_guest_cpu_config_change", ], BkStep.LABEL: "đŸ–ī¸ fingerprint", - "instances": DEFAULT_INSTANCES, + "instances": DEFAULT_INSTANCES.keys(), "platforms": DEFAULT_PLATFORMS, }, "cpuid_wrmsr": { diff --git a/.buildkite/pipeline_pr.py b/.buildkite/pipeline_pr.py index 42ef986b0b5..552ad45b4ed 100755 --- a/.buildkite/pipeline_pr.py +++ b/.buildkite/pipeline_pr.py @@ -21,15 +21,18 @@ pipeline = BKPipeline( priority=DEFAULT_PRIORITY, timeout_in_minutes=45, - initial_steps=[ - { - "command": "./tools/devtool -y checkstyle", - "label": "đŸĒļ Style", - }, - ], with_build_step=not DOC_ONLY_CHANGE, ) +pipeline.add_step( + { + "command": "./tools/devtool -y checkstyle", + "label": "đŸĒļ Style", + "depends_on": None, + }, + depends_on_build=False, +) + # run sanity build of devtool if Dockerfile is changed if 
any(x.parent.name == "devctr" for x in changed_files): pipeline.build_group_per_arch( @@ -58,7 +61,7 @@ platforms=[("al2", "linux_5.10")], timeout_in_minutes=300, **DEFAULTS_PERF, - decorate=False, + depends_on_build=False, ) # modify Kani steps' label for step in kani_grp["steps"]: @@ -69,6 +72,7 @@ pipeline.build_group( "đŸ“Ļ Build", pipeline.devtool_test(pytest_opts="integration_tests/build/"), + depends_on_build=False, ) pipeline.build_group( diff --git a/tests/integration_tests/build/test_dependencies.py b/tests/integration_tests/build/test_dependencies.py deleted file mode 100644 index 3e2bcca8c20..00000000000 --- a/tests/integration_tests/build/test_dependencies.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: Apache-2.0 -"""Enforces controls over dependencies.""" - -import os - -import pytest - -from host_tools import proc -from host_tools.cargo_build import cargo - -pytestmark = pytest.mark.skipif( - "Intel" not in proc.proc_type(), reason="test only runs on Intel" -) - - -def test_licenses(): - """Ensure license compatibility for Firecracker. - - For a list of currently allowed licenses checkout deny.toml in - the root directory. - """ - toml_file = os.path.normpath( - os.path.join(os.path.dirname(os.path.realpath(__file__)), "../../../Cargo.toml") - ) - - _, stdout, stderr = cargo( - "deny", f"--manifest-path {toml_file} check licenses bans" - ) - assert "licenses ok" in stdout - - # "cargo deny" should deny licenses by default but for some reason copyleft is allowed - # by it and if we add a dependency which has copyleft licenses "cargo deny" won't report - # it unless it is explicitly told to do so from the deny.toml. - # Our current deny.toml seems to cover all the cases we need but, - # if there is an exception like copyleft (where we don't want and don't deny - # in deny.toml and is allowed by cardo deny), we don't want to be left in the dark. 
- # For such cases check "cargo deny" output, make sure that there are no warnings reported - # related to the license and take appropriate actions i.e. either add them to allow list - # or remove them if they are incompatible with our licenses. - license_res = [line for line in stderr.split("\n") if "license" in line] - assert not license_res diff --git a/tests/integration_tests/build/test_binary_size.py b/tests/integration_tests/functional/test_binary_size.py similarity index 100% rename from tests/integration_tests/build/test_binary_size.py rename to tests/integration_tests/functional/test_binary_size.py diff --git a/tests/integration_tests/build/test_binary_static_linking.py b/tests/integration_tests/functional/test_binary_static_linking.py similarity index 100% rename from tests/integration_tests/build/test_binary_static_linking.py rename to tests/integration_tests/functional/test_binary_static_linking.py diff --git a/tests/integration_tests/style/test_licenses.py b/tests/integration_tests/style/test_licenses.py index 0633ac4e104..c95181850f7 100644 --- a/tests/integration_tests/style/test_licenses.py +++ b/tests/integration_tests/style/test_licenses.py @@ -5,6 +5,8 @@ import datetime from framework import utils_repo +from framework.defs import FC_WORKSPACE_DIR +from host_tools.cargo_build import cargo AMAZON_COPYRIGHT_YEARS = range(2018, datetime.datetime.now().year + 1) AMAZON_COPYRIGHT = ( @@ -96,7 +98,6 @@ def _validate_license(filename): or has_intel_copyright or has_rivos_copyright ) - return True def test_for_valid_licenses(): @@ -116,5 +117,27 @@ def test_for_valid_licenses(): assert not error_msg, f"Files {error_msg} have invalid licenses" -if __name__ == "__main__": - test_for_valid_licenses() +def test_dependency_licenses(): + """Ensure license compatibility for Firecracker. + + For a list of currently allowed licenses checkout deny.toml in + the root directory. 
+ """ + toml_file = FC_WORKSPACE_DIR / "Cargo.toml" + + _, stdout, stderr = cargo( + "deny", f"--manifest-path {toml_file} check licenses bans" + ) + assert "licenses ok" in stdout + + # "cargo deny" should deny licenses by default but for some reason copyleft is allowed + # by it and if we add a dependency which has copyleft licenses "cargo deny" won't report + # it unless it is explicitly told to do so from the deny.toml. + # Our current deny.toml seems to cover all the cases we need but, + # if there is an exception like copyleft (where we don't want and don't deny + # in deny.toml and is allowed by cargo deny), we don't want to be left in the dark. + # For such cases check "cargo deny" output, make sure that there are no warnings reported + # related to the license and take appropriate actions i.e. either add them to allow list + # or remove them if they are incompatible with our licenses. + license_res = [line for line in stderr.split("\n") if "license" in line] + assert not license_res