4 changes: 4 additions & 0 deletions .buildkite/branches.sh
@@ -0,0 +1,4 @@
#!/bin/bash

# This determines which branches will have pipelines triggered periodically, for the tests and DRA workflows.
BRANCHES=(main 8.9 7.17)
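# Sourced by the *.trigger.sh scripts in this directory.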
27 changes: 27 additions & 0 deletions .buildkite/dra-workflow.trigger.sh
@@ -0,0 +1,27 @@
#!/bin/bash

set -euo pipefail

echo "steps:"

source .buildkite/branches.sh

for BRANCH in "${BRANCHES[@]}"; do
  cat <<EOF
  - trigger: elasticsearch-hadoop-dra-workflow
    label: Trigger DRA snapshot workflow for $BRANCH
    async: true
    build:
      branch: $BRANCH
      env:
        DRA_WORKFLOW: snapshot
  - trigger: elasticsearch-hadoop-dra-workflow
    label: Trigger DRA staging workflow for $BRANCH
    async: true
    build:
      branch: $BRANCH
      env:
        DRA_WORKFLOW: staging
EOF
done
6 changes: 6 additions & 0 deletions .buildkite/dra-workflow.yml
@@ -0,0 +1,6 @@
steps:
  - label: DRA Workflow
    command: .buildkite/dra.sh
    timeout_in_minutes: 60
    env:
      USE_DRA_CREDENTIALS: true
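      # The pre-command hook reads the release-manager Vault credentials when this is set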
64 changes: 64 additions & 0 deletions .buildkite/dra.sh
@@ -0,0 +1,64 @@
#!/bin/bash

set -euo pipefail

DRA_WORKFLOW=${DRA_WORKFLOW:-snapshot}

# Staging artifacts are not built from the main branch, so exit early in that case
if [[ "$BUILDKITE_BRANCH" == "main" && "$DRA_WORKFLOW" == "staging" ]]; then
  exit 0
fi

echo --- Creating distribution

rm -Rfv ~/.gradle/init.d
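# Parse the eshadoop version out of buildSrc/esh-version.properties (a line of the form `eshadoop = <version>`)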
HADOOP_VERSION=$(grep eshadoop buildSrc/esh-version.properties | sed "s/eshadoop *= *//g")

VERSION_SUFFIX=""
BUILD_ARGS="-Dbuild.snapshot=false"
if [[ "$DRA_WORKFLOW" == "snapshot" ]]; then
  VERSION_SUFFIX="-SNAPSHOT"
  BUILD_ARGS="-Dbuild.snapshot=true"
fi

# The DRA endpoints and release-manager still use "master" where this repo uses "main"
RM_BRANCH="$BUILDKITE_BRANCH"
if [[ "$BUILDKITE_BRANCH" == "main" ]]; then
  RM_BRANCH=master
fi

echo "DRA_WORKFLOW=$DRA_WORKFLOW"
echo "HADOOP_VERSION=$HADOOP_VERSION"
echo "RM_BRANCH=$RM_BRANCH"
echo "VERSION_SUFFIX=$VERSION_SUFFIX"
echo "BUILD_ARGS=$BUILD_ARGS"

# Look up the build id of the latest Elasticsearch DRA build for this branch and workflow
ES_BUILD_ID=$(curl -sS "https://artifacts-$DRA_WORKFLOW.elastic.co/elasticsearch/latest/${RM_BRANCH}.json" | jq -r '.build_id')
echo "ES_BUILD_ID=$ES_BUILD_ID"

mkdir localRepo
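# Fetch the matching build-tools jar from the DRA artifacts server into the local repository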
wget --quiet "https://artifacts-$DRA_WORKFLOW.elastic.co/elasticsearch/${ES_BUILD_ID}/maven/org/elasticsearch/gradle/build-tools/${HADOOP_VERSION}${VERSION_SUFFIX}/build-tools-${HADOOP_VERSION}${VERSION_SUFFIX}.jar" \
  -O "localRepo/build-tools-${HADOOP_VERSION}${VERSION_SUFFIX}.jar"
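# The Gradle build below resolves build-tools from this directory via -PlocalRepo=true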

./gradlew -S -PlocalRepo=true "${BUILD_ARGS}" -Dorg.gradle.warning.mode=summary -Dcsv="$WORKSPACE/build/distributions/dependencies-${HADOOP_VERSION}${VERSION_SUFFIX}.csv" :dist:generateDependenciesReport distribution

# Allow other users access to read the artifacts so they are readable in the container
find "$WORKSPACE" -type f -path "*/build/distributions/*" -exec chmod a+r {} \;

# Allow other users write access to create checksum files
find "$WORKSPACE" -type d -path "*/build/distributions" -exec chmod a+w {} \;

echo --- Running release-manager

docker run --rm \
  --name release-manager \
  -e VAULT_ADDR="$DRA_VAULT_ADDR" \
  -e VAULT_ROLE_ID="$DRA_VAULT_ROLE_ID_SECRET" \
  -e VAULT_SECRET_ID="$DRA_VAULT_SECRET_ID_SECRET" \
  --mount type=bind,readonly=false,src="$PWD",target=/artifacts \
  docker.elastic.co/infra/release-manager:latest \
  cli collect \
  --project elasticsearch-hadoop \
  --branch "$RM_BRANCH" \
  --commit "$BUILDKITE_COMMIT" \
  --workflow "$DRA_WORKFLOW" \
  --version "$HADOOP_VERSION" \
  --artifact-set main
48 changes: 48 additions & 0 deletions .buildkite/hooks/pre-command
@@ -0,0 +1,48 @@
#!/bin/bash

WORKSPACE="$(pwd)"
export WORKSPACE

# Read .ci/java-versions.properties and export each key=value pair
export $(cat .ci/java-versions.properties | grep '=' | xargs)

JAVA_HOME="$HOME/.java/$ESH_BUILD_JAVA"
export JAVA_HOME

RUNTIME_JAVA_HOME="$HOME/.java/$ESH_RUNTIME_JAVA"
export RUNTIME_JAVA_HOME

JAVA7_HOME="$HOME/.java/java7"
export JAVA7_HOME

JAVA8_HOME="$HOME/.java/java8"
export JAVA8_HOME

JAVA9_HOME="$HOME/.java/java9"
export JAVA9_HOME

JAVA10_HOME="$HOME/.java/java10"
export JAVA10_HOME

JAVA11_HOME="$HOME/.java/java11"
export JAVA11_HOME

JAVA12_HOME="$HOME/.java/openjdk12"
export JAVA12_HOME

JAVA13_HOME="$HOME/.java/openjdk13"
export JAVA13_HOME

JAVA14_HOME="$HOME/.java/openjdk14"
export JAVA14_HOME

# Vault credentials for the release-manager container invoked by .buildkite/dra.sh
if [[ "${USE_DRA_CREDENTIALS:-}" == "true" ]]; then
  DRA_VAULT_ROLE_ID_SECRET=$(vault read -field=role-id secret/ci/elastic-elasticsearch-hadoop/legacy-vault-credentials)
  export DRA_VAULT_ROLE_ID_SECRET

  DRA_VAULT_SECRET_ID_SECRET=$(vault read -field=secret-id secret/ci/elastic-elasticsearch-hadoop/legacy-vault-credentials)
  export DRA_VAULT_SECRET_ID_SECRET

  DRA_VAULT_ADDR=https://secrets.elastic.co:8200
  export DRA_VAULT_ADDR
fi
93 changes: 93 additions & 0 deletions .buildkite/pipeline.py
@@ -0,0 +1,93 @@
#!/usr/bin/env python

import json
import os
import re
from typing import Dict

# Note: If you'd like to add any debug info here, make sure to do it on stderr
# stdout will be fed into `buildkite-agent pipeline upload`

coreFile = open("spark/core/build.gradle", "r")
core = coreFile.read()
coreFile.close()

# `Variant "spark20scala212"` => ["20", "212"]
groupings = re.findall(r'Variant +"spark([0-9]+)scala([0-9]+)"', core)

groupingsBySparkVersion: Dict[str, list[str]] = {}
for grouping in groupings:
    if grouping[0] not in groupingsBySparkVersion:
        groupingsBySparkVersion[grouping[0]] = []
    groupingsBySparkVersion[grouping[0]].append(grouping[1])

gradlePropertiesFile = open("gradle.properties", "r")
gradleProperties = gradlePropertiesFile.read()
gradlePropertiesFile.close()
# `scala210Version = 2.10.7` => ["210", "2.10.7"]
matches = re.findall(
    r"scala([0-9]+)Version *= *([0-9]+\.[0-9]+\.[0-9]+)", gradleProperties
)

scalaVersions = {}
for match in matches:
    scalaVersions[match[0]] = match[1]


pipeline = {
    "agents": {
        "provider": "gcp",
        "image": "family/elasticsearch-ubuntu-2004",
        "machineType": "n2-standard-8",
        "diskType": "pd-ssd",
        "diskSizeGb": "100",
        "useVault": "false",
    },
    "steps": [],
}

# Skip integration tests during intake; they run in the per-variant steps below
intakeTasks = map(
    lambda sparkVersion: f"-x :elasticsearch-spark-{sparkVersion}:integrationTest",
    groupingsBySparkVersion.keys(),
)


pipeline["steps"].append(
    {
        "label": "intake",
        "timeout_in_minutes": 240,
        "command": "./gradlew check " + " ".join(intakeTasks),
    }
)

# One integration-test step per Spark/Scala variant
for sparkVersion in groupingsBySparkVersion.keys():
    for scalaVersion in groupingsBySparkVersion[sparkVersion]:
        scalaFullVersion = scalaVersions[scalaVersion]
        pipeline["steps"].append(
            {
                "label": f"spark-{sparkVersion} / scala-{scalaFullVersion}",
                "timeout_in_minutes": 180,
                "command": f"./gradlew :elasticsearch-spark-{sparkVersion}:integrationTest -Pscala.variant={scalaFullVersion}",
            }
        )

# When enabled, run the DRA snapshot workflow after all preceding test steps have finished
if os.environ.get("ENABLE_DRA_WORKFLOW") == "true":
    pipeline["steps"].append(
        {
            "wait": None,
        }
    )

    pipeline["steps"].append(
        {
            "label": "DRA Snapshot Workflow",
            "command": ".buildkite/dra.sh",
            "timeout_in_minutes": 60,
            "agents": {"useVault": "true"},
            "env": {
                "USE_DRA_CREDENTIALS": "true",
            },
        },
    )

print(json.dumps(pipeline, indent=2))
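# stdout is consumed by `buildkite-agent pipeline upload` (see the note at the top of this file)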
27 changes: 27 additions & 0 deletions .buildkite/pull-requests.json
@@ -0,0 +1,27 @@
{
  "jobs": [
    {
      "enabled": true,
      "pipeline_slug": "elasticsearch-hadoop-tests",
      "allow_org_users": true,
      "allowed_repo_permissions": [
        "admin",
        "write"
      ],
      "set_commit_status": false,
      "build_on_commit": true,
      "build_on_comment": true,
      "trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))",
      "always_trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))",
      "skip_ci_labels": [
        "skip-ci"
      ],
      "skip_ci_on_only_changed": [
        "^docs/",
        "\\.md$",
        "\\.mdx$",
        "^\\.buildkite/pull_requests\\.json$"
      ]
    }
  ]
}
20 changes: 20 additions & 0 deletions .buildkite/tests.trigger.sh
@@ -0,0 +1,20 @@
#!/bin/bash

set -euo pipefail

echo "steps:"

source .buildkite/branches.sh

for BRANCH in "${BRANCHES[@]}"; do
  cat <<EOF
  - trigger: elasticsearch-hadoop-tests
    label: Trigger tests pipeline for $BRANCH
    async: true
    build:
      branch: $BRANCH
EOF
done



1 change: 1 addition & 0 deletions .gitignore
@@ -17,3 +17,4 @@ metastore_db
/spark/keyvaluerdd.parquet
out/
localRepo/
.vscode