Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
69 changes: 38 additions & 31 deletions .buildkite/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,14 @@
import subprocess
from pathlib import Path

DEFAULT_INSTANCES = [
"c5n.metal", # Intel Skylake
"m5n.metal", # Intel Cascade Lake
"m6i.metal", # Intel Icelake
"m6a.metal", # AMD Milan
"m6g.metal", # Graviton2
"m7g.metal", # Graviton3
]
# Mapping of EC2 bare-metal instance type -> CPU architecture it provides.
# Used both to parametrize test pipelines and to look up which architecture's
# build artifacts a step running on a given instance depends on.
DEFAULT_INSTANCES = {
    # Intel: Skylake, Cascade Lake, Icelake
    "c5n.metal": "x86_64",
    "m5n.metal": "x86_64",
    "m6i.metal": "x86_64",
    # AMD: Milan
    "m6a.metal": "x86_64",
    # AWS Graviton2 and Graviton3
    "m6g.metal": "aarch64",
    "m7g.metal": "aarch64",
}

DEFAULT_PLATFORMS = [
("al2", "linux_5.10"),
Expand Down Expand Up @@ -146,7 +146,7 @@ def __call__(self, parser, namespace, value, option_string=None):
"--instances",
required=False,
nargs="+",
default=DEFAULT_INSTANCES,
default=DEFAULT_INSTANCES.keys(),
)
COMMON_PARSER.add_argument(
"--platforms",
Expand Down Expand Up @@ -233,7 +233,7 @@ class BKPipeline:

parser = COMMON_PARSER

def __init__(self, initial_steps=None, with_build_step=True, **kwargs):
def __init__(self, with_build_step=True, **kwargs):
self.steps = []
self.args = args = self.parser.parse_args()
# Retry one time if agent was lost. This can happen if we terminate the
Expand All @@ -258,33 +258,22 @@ def __init__(self, initial_steps=None, with_build_step=True, **kwargs):
# Build sharing
if with_build_step:
build_cmds, self.shared_build = shared_build()
step_build = group("🏗️ Build", build_cmds, **self.per_arch)
self.steps += [step_build, "wait"]
self.build_group_per_arch(
"🏗️ Build", build_cmds, depends_on_build=False, key_prefix="build"
)
else:
self.shared_build = None

# If we run initial_steps before the "wait" step above, then a failure of the initial steps
# would result in the build not progressing past the "wait" step (as buildkite only proceeds past a wait step
# if everything before it passed). Thus put the initial steps after the "wait" step, but set `"depends_on": null`
# to start running them immediately (e.g. without waiting for the "wait" step to unblock).
#
# See also https://buildkite.com/docs/pipelines/dependencies#explicit-dependencies-in-uploaded-steps
if initial_steps:
for step in initial_steps:
step["depends_on"] = None

self.steps += initial_steps

def add_step(self, step, decorate=True):
def add_step(self, step, depends_on_build=True):
"""
Add a step to the pipeline.

https://buildkite.com/docs/pipelines/step-reference

:param step: a Buildkite step
:param decorate: inject needed commands for sharing builds
:param depends_on_build: inject needed commands for sharing builds
"""
if decorate and isinstance(step, dict):
if depends_on_build and isinstance(step, dict):
step = self._adapt_group(step)
self.steps.append(step)
return step
Expand All @@ -307,6 +296,10 @@ def _adapt_group(self, group):

for step in group["steps"]:
step["command"] = prepend + step["command"]
if self.shared_build is not None:
step["depends_on"] = (
"build_" + DEFAULT_INSTANCES[step["agents"]["instance"]]
)
return group

def build_group(self, *args, **kwargs):
Expand All @@ -315,16 +308,30 @@ def build_group(self, *args, **kwargs):

https://buildkite.com/docs/pipelines/group-step
"""
decorate = kwargs.pop("decorate", True)
depends_on_build = kwargs.pop("depends_on_build", True)
combined = overlay_dict(self.per_instance, kwargs)
return self.add_step(group(*args, **combined), decorate=decorate)
return self.add_step(
group(*args, **combined), depends_on_build=depends_on_build
)

def build_group_per_arch(self, *args, **kwargs):
def build_group_per_arch(self, label, *args, **kwargs):
"""
Build a group, parametrizing over the architectures only.

kwargs consumed by this method and not passed down to `group`:
- `depends_on_build` (default: `True`): Whether the steps in this group depend on the artifacts from the shared compilation steps
- `key_prefix`: If set, causes the generated steps to have a "key" field set to f"{key_prefix}_{$ARCH}".
"""
depends_on_build = kwargs.pop("depends_on_build", True)
key_prefix = kwargs.pop("key_prefix", None)
combined = overlay_dict(self.per_arch, kwargs)
return self.add_step(group(*args, **combined))
grp = group(label, *args, **combined)
if key_prefix:
for step in grp["steps"]:
step["key"] = (
key_prefix + "_" + DEFAULT_INSTANCES[step["agents"]["instance"]]
)
return self.add_step(grp, depends_on_build=depends_on_build)

def to_dict(self):
"""Render the pipeline as a dictionary."""
Expand Down
2 changes: 1 addition & 1 deletion .buildkite/pipeline_cpu_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ class BkStep(str, Enum):
"tools/devtool -y test --no-build -- -m no_block_pr integration_tests/functional/test_cpu_template_helper.py -k test_guest_cpu_config_change",
],
BkStep.LABEL: "🖐️ fingerprint",
"instances": DEFAULT_INSTANCES,
"instances": DEFAULT_INSTANCES.keys(),
"platforms": DEFAULT_PLATFORMS,
},
"cpuid_wrmsr": {
Expand Down
18 changes: 11 additions & 7 deletions .buildkite/pipeline_pr.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,18 @@
pipeline = BKPipeline(
priority=DEFAULT_PRIORITY,
timeout_in_minutes=45,
initial_steps=[
{
"command": "./tools/devtool -y checkstyle",
"label": "🪶 Style",
},
],
with_build_step=not DOC_ONLY_CHANGE,
)

pipeline.add_step(
{
"command": "./tools/devtool -y checkstyle",
"label": "🪶 Style",
"depends_on": None,
},
depends_on_build=False,
)

# run sanity build of devtool if Dockerfile is changed
if any(x.parent.name == "devctr" for x in changed_files):
pipeline.build_group_per_arch(
Expand Down Expand Up @@ -58,7 +61,7 @@
platforms=[("al2", "linux_5.10")],
timeout_in_minutes=300,
**DEFAULTS_PERF,
decorate=False,
depends_on_build=False,
)
# modify Kani steps' label
for step in kani_grp["steps"]:
Expand All @@ -69,6 +72,7 @@
pipeline.build_group(
"📦 Build",
pipeline.devtool_test(pytest_opts="integration_tests/build/"),
depends_on_build=False,
)

pipeline.build_group(
Expand Down
42 changes: 0 additions & 42 deletions tests/integration_tests/build/test_dependencies.py

This file was deleted.

29 changes: 26 additions & 3 deletions tests/integration_tests/style/test_licenses.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
import datetime

from framework import utils_repo
from framework.defs import FC_WORKSPACE_DIR
from host_tools.cargo_build import cargo

AMAZON_COPYRIGHT_YEARS = range(2018, datetime.datetime.now().year + 1)
AMAZON_COPYRIGHT = (
Expand Down Expand Up @@ -96,7 +98,6 @@ def _validate_license(filename):
or has_intel_copyright
or has_rivos_copyright
)
return True


def test_for_valid_licenses():
Expand All @@ -116,5 +117,27 @@ def test_for_valid_licenses():
assert not error_msg, f"Files {error_msg} have invalid licenses"


if __name__ == "__main__":
test_for_valid_licenses()
def test_dependency_licenses():
    """Ensure license compatibility for Firecracker.

    For a list of currently allowed licenses check out deny.toml in
    the root directory.

    Runs `cargo deny` against the workspace manifest and fails if any
    dependency carries a disallowed license or if cargo-deny emits any
    license-related warning.
    """
    # Workspace manifest that cargo-deny analyzes for the full dependency tree.
    toml_file = FC_WORKSPACE_DIR / "Cargo.toml"

    # `cargo` is a project helper wrapping subprocess; presumably returns
    # (returncode, stdout, stderr) — TODO confirm against host_tools.cargo_build.
    _, stdout, stderr = cargo(
        "deny", f"--manifest-path {toml_file} check licenses bans"
    )
    # cargo-deny prints "licenses ok" on success of the licenses check.
    assert "licenses ok" in stdout

    # "cargo deny" should deny licenses by default but for some reason copyleft is allowed
    # by it and if we add a dependency which has copyleft licenses "cargo deny" won't report
    # it unless it is explicitly told to do so from the deny.toml.
    # Our current deny.toml seems to cover all the cases we need but,
    # if there is an exception like copyleft (where we don't want and don't deny
    # in deny.toml and is allowed by cargo deny), we don't want to be left in the dark.
    # For such cases check "cargo deny" output, make sure that there are no warnings reported
    # related to the license and take appropriate actions i.e. either add them to allow list
    # or remove them if they are incompatible with our licenses.
    license_res = [line for line in stderr.split("\n") if "license" in line]
    assert not license_res
Loading