diff --git a/.buildkite/branches.sh b/.buildkite/branches.sh
new file mode 100644
index 000000000..66d7508ba
--- /dev/null
+++ b/.buildkite/branches.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+# This determines which branches will have pipelines triggered periodically, for tests and dra workflows.
+BRANCHES=(main 8.9 7.17)
diff --git a/.buildkite/dra-workflow.trigger.sh b/.buildkite/dra-workflow.trigger.sh
new file mode 100755
index 000000000..0ee8b51a9
--- /dev/null
+++ b/.buildkite/dra-workflow.trigger.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+set -euo pipefail
+
+echo "steps:"
+
+# BRANCHES comes from the shared branch list so tests and DRA workflows stay in sync.
+source .buildkite/branches.sh
+
+for BRANCH in "${BRANCHES[@]}"; do
+  # NOTE(review): heredoc body reconstructed; the original patch text was garbled here.
+  cat <<EOF
+  - trigger: elasticsearch-hadoop-dra-workflow
+    label: Trigger DRA snapshot workflow for $BRANCH
+    async: true
+    build:
+      branch: "$BRANCH"
+EOF
+done
diff --git a/.buildkite/pipeline.py b/.buildkite/pipeline.py
new file mode 100755
--- /dev/null
+++ b/.buildkite/pipeline.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+
+import json
+import os
+import re
+
+# NOTE(review): the prelude reading the Gradle build into `core` was garbled in
+# the original patch text; reconstructed - verify the file path.
+coreFile = open("spark/core/build.gradle", "r")
+core = coreFile.read()
+coreFile.close()
+# `Variant "spark20scala212"` => ("20", "212")
+groupings = re.findall(r'Variant +"spark([0-9]+)scala([0-9]+)"', core)
+
+groupingsBySparkVersion: dict[str, list[str]] = {}
+for grouping in groupings:
+    if grouping[0] not in groupingsBySparkVersion:
+        groupingsBySparkVersion[grouping[0]] = []
+    groupingsBySparkVersion[grouping[0]].append(grouping[1])
+
+gradlePropertiesFile = open("gradle.properties", "r")
+gradleProperties = gradlePropertiesFile.read()
+gradlePropertiesFile.close()
+# `scala210Version = 2.10.7` => ["210", "2.10.7"]
+matches = re.findall(
+    r"scala([0-9]+)Version *= *([0-9]+\.[0-9]+\.[0-9]+)", gradleProperties
+)
+
+scalaVersions = {}
+for match in matches:
+    scalaVersions[match[0]] = match[1]
+
+
+pipeline = {
+    "agents": {
+        "provider": "gcp",
+        "image": "family/elasticsearch-ubuntu-2004",
+        "machineType": "n2-standard-8",
+        "diskType": "pd-ssd",
+        "diskSizeGb": "100",
+        "useVault": "false",
+    },
+    "steps": [],
+}
+
+intakeTasks = map(
+    lambda sparkVersion: f"-x :elasticsearch-spark-{sparkVersion}:integrationTest",
+    groupingsBySparkVersion.keys(),
+)
+
+
+pipeline["steps"].append(
+    {
+        "label": "intake",
+        "timeout_in_minutes": 240,
+        "command": "./gradlew check " + " ".join(intakeTasks),
+    }
+)
+
+for sparkVersion in groupingsBySparkVersion.keys():
+    for scalaVersion in groupingsBySparkVersion[sparkVersion]:
+        scalaFullVersion = scalaVersions[scalaVersion]
+        pipeline["steps"].append(
+            {
+                "label": f"spark-{sparkVersion} / scala-{scalaFullVersion}",
+                "timeout_in_minutes": 180,
+                "command": f"./gradlew :elasticsearch-spark-{sparkVersion}:integrationTest -Pscala.variant={scalaFullVersion}",
+            }
+        )
+
+if os.environ.get("ENABLE_DRA_WORKFLOW") == "true":
+    pipeline["steps"].append(
+        {
+            "wait": None,
+        }
+    )
+
+    pipeline["steps"].append(
+        {
+            "label": "DRA Snapshot Workflow",
+            "command": ".buildkite/dra.sh",
+            "timeout_in_minutes": 60,
+            "agents": {"useVault": "true"},
+            "env": {
+                "USE_DRA_CREDENTIALS": "true",
+            },
+        },
+    )
+
+print(json.dumps(pipeline, indent=2))
diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json
new file mode 100644
index 000000000..c4300e010
--- /dev/null
+++ b/.buildkite/pull-requests.json
@@ -0,0 +1,27 @@
+{
+  "jobs": [
+    {
+      "enabled": true,
+      "pipeline_slug": "elasticsearch-hadoop-tests",
+      "allow_org_users": true,
+      "allowed_repo_permissions": [
+        "admin",
+        "write"
+      ],
+      "set_commit_status": false,
+      "build_on_commit": true,
+      "build_on_comment": true,
+      "trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))",
+      "always_trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))",
+      "skip_ci_labels": [
+        "skip-ci"
+      ],
+      "skip_ci_on_only_changed": [
+        "^docs/",
+        "\\.md$",
+        "\\.mdx$",
+        "^\\.buildkite/pull-requests\\.json$"
+      ]
+    }
+  ]
+}
diff --git a/.buildkite/tests.trigger.sh b/.buildkite/tests.trigger.sh
new file mode 100755
index 000000000..4d7c5ead8
--- /dev/null
+++ b/.buildkite/tests.trigger.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -euo pipefail
+
+echo "steps:"
+
+source .buildkite/branches.sh
+
+for BRANCH in "${BRANCHES[@]}"; do
+  # NOTE(review): heredoc body reconstructed; the original patch text was garbled here.
+  cat <<EOF
+  - trigger: elasticsearch-hadoop-tests
+    label: Trigger tests pipeline for $BRANCH
+    async: true
+    build:
+      branch: "$BRANCH"
+EOF
+done