Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
65 changes: 49 additions & 16 deletions .pipelines/templates/stages/validate_makefile/dev-build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,50 @@ stages:
dependsOn: ${{ parameters.dependsOnStage }}

jobs:
- job: Make_Pipelines
displayName: Validate Pipelines
timeoutInMinutes: 30
pool:
type: linux
name: trident-ubuntu-1es-pool-eastus2
hostArchitecture: amd64
variables:
ob_outputDirectory: $(Build.SourcesDirectory)/build
steps:
- template: ../common_tasks/avoid-pypi-usage.yml

- script: |
set -eux

make check-pipelines BRANCH=$(Build.SourceBranch) NO_PARALLEL=true
displayName: "Validating pipeline templates work"
workingDirectory: $(Build.SourcesDirectory)
env:
AZURE_DEVOPS_EXT_PAT: $(System.AccessToken)
OVERRIDE_RUST_FEED: false

- job: Check_Licenses
displayName: Check Licenses
timeoutInMinutes: 30
pool:
type: linux
name: trident-ubuntu-1es-pool-eastus2
hostArchitecture: amd64
variables:
ob_outputDirectory: $(Build.SourcesDirectory)/build
steps:
- template: ../common_tasks/avoid-pypi-usage.yml
- template: ../common_tasks/rustup.yml
- template: ../common_tasks/cargo-auth.yml

- script: |
# Use a lower optimization level to speed up cargo-deny installation.
CARGO_PROFILE_RELEASE_OPT_LEVEL=0 cargo install --locked cargo-deny@0.16.2
displayName: Install cargo-deny

- script: cargo deny --all-features --workspace check licenses
displayName: Check licenses

- job: Make
displayName: make
timeoutInMinutes: 30
Expand Down Expand Up @@ -56,6 +100,11 @@ stages:

- bash: |
set -eux

# Use a lower optimization level to speed up the build, since
# we're only validating that the Makefile targets work.
export CARGO_PROFILE_RELEASE_OPT_LEVEL=0

make validate-configs
make bin/trident-rpms.tar.gz
make docker-build
Expand All @@ -71,19 +120,3 @@ stages:
workingDirectory: $(Build.SourcesDirectory)
env:
OVERRIDE_RUST_FEED: false

- script: |
set -eux

make check-pipelines BRANCH=$(Build.SourceBranch)
displayName: "Validating pipeline templates work"
workingDirectory: $(Build.SourcesDirectory)
env:
AZURE_DEVOPS_EXT_PAT: $(System.AccessToken)
OVERRIDE_RUST_FEED: false

- script: cargo install --locked cargo-deny@0.16.2
displayName: Install cargo-deny

- script: cargo deny --all-features --workspace check licenses
displayName: Check licenses
29 changes: 17 additions & 12 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -34,18 +34,23 @@ check-pipelines:
ifdef BRANCH
$(eval BRANCH_FLAG := -b $(BRANCH))
endif
./scripts/test-pipeline prism-cicd -q $(BRANCH_FLAG)
./scripts/test-pipeline azl-cicd -q $(BRANCH_FLAG)
./scripts/test-pipeline pr -q $(BRANCH_FLAG)
./scripts/test-pipeline pr-e2e -q $(BRANCH_FLAG)
./scripts/test-pipeline pr-e2e-azure -q $(BRANCH_FLAG)
./scripts/test-pipeline ci -q $(BRANCH_FLAG)
./scripts/test-pipeline pre -q $(BRANCH_FLAG)
./scripts/test-pipeline rel -q $(BRANCH_FLAG)
./scripts/test-pipeline testing -q $(BRANCH_FLAG)
./scripts/test-pipeline tester -q $(BRANCH_FLAG)
./scripts/test-pipeline scale-official -q $(BRANCH_FLAG)
./scripts/test-pipeline full-validation -q $(BRANCH_FLAG)
ifndef NO_PARALLEL
$(eval PARALLEL_FLAG := --parallel)
endif
# Note: the az-cli version installed in pipelines does not support --parallel,
# so CI invokes this target with NO_PARALLEL=true to run the checks sequentially.
./scripts/test-pipeline $(PARALLEL_FLAG) -q $(BRANCH_FLAG) \
prism-cicd \
azl-cicd \
pr \
pr-e2e \
pr-e2e-azure \
ci \
pre \
rel \
testing \
tester \
scale-official \
full-validation

.PHONY: check-sh
check-sh:
Expand Down
151 changes: 95 additions & 56 deletions scripts/test-pipeline
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ parser.add_argument(
type=str,
help="The pipeline to preview",
choices=pipeline_metadata_map.keys(),
nargs="+",
)

parser.add_argument(
Expand All @@ -70,14 +71,75 @@ parser.add_argument(
"-q", "--quiet", action="store_true", help="Suppress YAML output", default=False
)

# Opt-in flag: preview all selected pipelines concurrently instead of one at a time.
parser.add_argument(
    "--parallel",
    action="store_true",
    help="Preview pipelines in parallel",
)

args = parser.parse_args()

# Parallel runs interleave the per-pipeline output streams, so the final-YAML
# dump is only readable in sequential mode; require quiet mode when parallelizing.
if args.parallel and not args.quiet:
    parser.error("--parallel requires --quiet (-q)")
Copy link

Copilot AI Feb 25, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The error message could be more descriptive about why --parallel requires --quiet. Consider expanding it to explain that parallel execution may interleave output, making it unreadable without the quiet flag.

Suggested change
parser.error("--parallel requires --quiet (-q)")
parser.error(
"--parallel requires --quiet (-q) because parallel execution may interleave "
"output from multiple pipelines, making it unreadable without quiet mode."
)

Copilot uses AI. Check for mistakes.


def check_pipeline(pipeline_name: str, selected_branch: str, quiet: bool = False):
    """Validate one pipeline by requesting a preview run on *selected_branch*.

    Posts a ``previewRun`` request through ``az devops invoke``, which asks
    Azure DevOps to expand and validate the pipeline YAML without actually
    running it.

    Args:
        pipeline_name: Key into the module-level ``pipeline_metadata_map``.
        selected_branch: Git ref name to preview the pipeline against.
        quiet: When True, suppress printing of the expanded final YAML.

    Raises:
        Exception: If the pipeline has no registered ID, or the preview
            request is rejected by Azure DevOps.
    """
    pipeline_metadata = pipeline_metadata_map[pipeline_name]

    # An ID of -1 is the placeholder used for pipelines that have not been
    # registered in Azure DevOps yet.
    if pipeline_metadata.id == -1:
        raise Exception(f"Pipeline '{pipeline_name}' is not defined in the script")

    print(
        f"Checking pipeline '{pipeline_name}' with ID '{pipeline_metadata.id}' on branch '{selected_branch}'",
        file=sys.stderr,
    )

    # previewRun=True makes the API expand/validate the YAML without queueing a run.
    payload = {
        "previewRun": True,
        "resources": {"repositories": {"self": {"refName": selected_branch}}},
    }

    if pipeline_metadata.parameters:
        payload["templateParameters"] = pipeline_metadata.parameters

    # az devops invoke reads the request body from a file, so stage the JSON
    # payload in a temp file that lives for the duration of the call.
    with tempfile.NamedTemporaryFile() as payload_file:
        payload_file.write(json.dumps(payload).encode("utf-8"))
        payload_file.flush()

        cmd = [
            "az",
            "devops",
            "invoke",
            "--org",
            "https://dev.azure.com/mariner-org",
            "--api-version",
            "7.0",
            "--area",
            "pipelines",
            "--resource",
            "runs",
            "--route-parameters",
            f"project={pipeline_metadata.project}",
            f"pipelineId={pipeline_metadata.id}",
            "--http-method",
            "POST",
            "--in-file",
            payload_file.name,
        ]
        output = subprocess.run(
            cmd,
            capture_output=True,
        )

    # Raise (rather than exit) so callers can aggregate failures across pipelines.
    if output.returncode != 0:
        raise Exception(f"Pipeline preview failed: {output.stderr.decode('utf-8')}")

    print(f"Pipeline '{pipeline_name}' previewed successfully", file=sys.stderr)

    if not quiet:
        out_json = json.loads(output.stdout.decode("utf-8"))
        print(out_json["finalYaml"])
if args.branch:
selected_branch = args.branch
Expand All @@ -93,55 +155,32 @@ else:
.strip()
)

print(
f"Checking pipeline '{args.pipeline}' with ID '{pipeline_metadata.id}' on branch '{selected_branch}'",
file=sys.stderr,
)

payload = {
"previewRun": True,
"resources": {"repositories": {"self": {"refName": selected_branch}}},
}

if pipeline_metadata.parameters:
payload["templateParameters"] = pipeline_metadata.parameters

with tempfile.NamedTemporaryFile() as payload_file:
payload_file.write(json.dumps(payload).encode("utf-8"))
payload_file.flush()

cmd = [
"az",
"devops",
"invoke",
"--org",
"https://dev.azure.com/mariner-org",
"--api-version",
"7.0",
"--area",
"pipelines",
"--resource",
"runs",
"--route-parameters",
f"project={pipeline_metadata.project}",
f"pipelineId={pipeline_metadata.id}",
"--http-method",
"POST",
"--in-file",
payload_file.name,
]
output = subprocess.run(
cmd,
capture_output=True,
)

if output.returncode != 0:
print("Failed to preview pipeline:", file=sys.stderr)
print(output.stderr.decode("utf-8"), file=sys.stderr)
exit(1)

print("Pipeline previewed successfully", file=sys.stderr)

if not args.quiet:
out_json = json.loads(output.stdout.decode("utf-8"))
print(out_json["finalYaml"])
# Dispatch the preview of each requested pipeline, either concurrently or
# sequentially depending on --parallel.
if args.parallel:
    # Imported lazily: only the parallel path needs it.
    import concurrent.futures

    with concurrent.futures.ThreadPoolExecutor() as executor:
        # Map each future back to its pipeline name so failures can be reported.
        # Pass args.quiet through — without it, each worker would print the
        # final YAML despite --parallel requiring -q (fixes the dropped arg).
        futures = {
            executor.submit(check_pipeline, name, selected_branch, args.quiet): name
            for name in args.pipeline
        }
        failures = []
        for future in concurrent.futures.as_completed(futures):
            name = futures[future]
            try:
                future.result()
            except Exception as e:
                print(f"Error previewing pipeline '{name}': {e}", file=sys.stderr)
                failures.append(name)
        # Report every failed pipeline at once rather than stopping early.
        if failures:
            print(
                f"Failed pipelines: {', '.join(failures)}",
                file=sys.stderr,
            )
            exit(1)
else:
    # Sequential path: stop at the first failing pipeline.
    for pipeline_name in args.pipeline:
        try:
            check_pipeline(pipeline_name, selected_branch, args.quiet)
        except Exception as e:
            print(f"Error previewing pipeline '{pipeline_name}': {e}", file=sys.stderr)
            exit(1)
Comment thread
frhuelsz marked this conversation as resolved.