diff --git a/.buildkite/hooks/post-checkout b/.buildkite/hooks/post-checkout
new file mode 100755
index 00000000000..b1572902664
--- /dev/null
+++ b/.buildkite/hooks/post-checkout
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+set -euo pipefail
+
+checkout_merge() {
+    local target_branch=$1
+    local pr_commit=$2
+    local merge_branch=$3
+
+    if [[ -z "${target_branch}" ]]; then
+        echo "No pull request target branch"
+        exit 1
+    fi
+
+    git fetch -v origin "${target_branch}"
+    git checkout FETCH_HEAD
+    echo "Current branch: $(git rev-parse --abbrev-ref HEAD)"
+
+    # create a temporary branch to merge the PR with the target branch
+    git checkout -b "${merge_branch}"
+    echo "New branch created: $(git rev-parse --abbrev-ref HEAD)"
+
+    # set the author identity so that git merge can be run
+    git config user.name "github-merged-pr-post-checkout"
+    git config user.email "auto-merge@buildkite"
+
+    git merge --no-edit "${BUILDKITE_COMMIT}" || {
+        local merge_result=$?
+        echo "Merge failed: ${merge_result}"
+        git merge --abort
+        exit ${merge_result}
+    }
+}
+
+pull_request="${BUILDKITE_PULL_REQUEST:-false}"
+
+if [[ "${pull_request}" == "false" ]]; then
+    echo "Not a pull request, skipping"
+    exit 0
+fi
+
+TARGET_BRANCH="${BUILDKITE_PULL_REQUEST_BASE_BRANCH:-master}"
+PR_COMMIT="${BUILDKITE_COMMIT}"
+PR_ID=${BUILDKITE_PULL_REQUEST}
+MERGE_BRANCH="pr_merge_${PR_ID}"
+
+checkout_merge "${TARGET_BRANCH}" "${PR_COMMIT}" "${MERGE_BRANCH}"
+
+echo "Commit information"
+
+git --no-pager log --format=%B -n 1
+
+# Ensure buildkite groups are rendered
+echo ""
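The hook only acts on pull-request builds: it merges the PR commit into its target branch on a throwaway `pr_merge_<id>` branch, so the rest of the build runs against the merge result instead of the raw PR head. A minimal local dry run, assuming a clone of the repository and illustrative values for the Buildkite variables, could look like this:

```bash
# Hypothetical values; Buildkite sets these for real PR builds.
export BUILDKITE_PULL_REQUEST=1234
export BUILDKITE_PULL_REQUEST_BASE_BRANCH=main
export BUILDKITE_COMMIT="$(git rev-parse HEAD)"

bash .buildkite/hooks/post-checkout
# Expected to end on a local branch named pr_merge_1234 containing the merge result.
```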
diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command
new file mode 100755
index 00000000000..3acf1c5cd90
--- /dev/null
+++ b/.buildkite/hooks/pre-command
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+source .buildkite/scripts/common.sh
+
+set -euo pipefail
+
+# Avoid any pager when running git commands
+git config --global core.pager 'cat'
+
+export UPLOAD_SAFE_LOGS=${UPLOAD_SAFE_LOGS:-"0"}
+export BASE_DIR=$(pwd)
+export GO_VERSION=$(cat .go-version)
+
+export REPO_NAME=$(repo_name "${BUILDKITE_REPO}")
+export TMP_FOLDER_TEMPLATE_BASE="tmp.${REPO_NAME}"
+export TMP_FOLDER_TEMPLATE="${TMP_FOLDER_TEMPLATE_BASE}.XXXXXXXXX"
+
+export REPO_BUILD_TAG="${REPO_NAME}/$(buildkite_pr_branch_build_id)"
+
+JENKINS_API_TOKEN_PATH=kv/ci-shared/platform-ingest/jenkins_api_tokens
+SIGNING_PACKAGES_GCS_CREDENTIALS_PATH=kv/ci-shared/platform-ingest/signing_packages_gcs_artifacts_credentials
+PACKAGE_UPLOADER_GCS_CREDENTIALS_PATH=kv/ci-shared/platform-ingest/package_storage_uploader
+PRIVATE_CI_GCS_CREDENTIALS_PATH=kv/ci-shared/platform-ingest/private_ci_artifacts_gcs_credentials
+
+EC_TOKEN_PATH=kv/ci-shared/platform-ingest/platform-ingest-ec-qa
+EC_DATA_PATH=secret/ci/elastic-integrations/ec_data
+
+if [ -n "${ELASTIC_PACKAGE_LINKS_FILE_PATH+x}" ]; then
+    # the first upload pipeline does not have the environment variables defined in the YAML
+    export ELASTIC_PACKAGE_LINKS_FILE_PATH=${BASE_DIR}/${ELASTIC_PACKAGE_LINKS_FILE_PATH}
+fi
+
+if [ "${BUILDKITE_PIPELINE_SLUG}" == "integrations" ]; then
+    if [ "${BUILDKITE_STEP_KEY}" == "publish-packages" ]; then
+        export JENKINS_USERNAME_SECRET=$(retry 5 vault kv get -field username ${JENKINS_API_TOKEN_PATH})
+        export JENKINS_HOST_SECRET=$(retry 5 vault kv get -field internal_ci_host ${JENKINS_API_TOKEN_PATH})
+        export JENKINS_TOKEN=$(retry 5 vault kv get -field internal_ci ${JENKINS_API_TOKEN_PATH})
+
+        # signing job
+        export SIGNING_PACKAGES_GCS_CREDENTIALS_SECRET=$(retry 5 vault kv get -field value ${SIGNING_PACKAGES_GCS_CREDENTIALS_PATH})
+
+        # publishing job
+        export PACKAGE_UPLOADER_GCS_CREDENTIALS_SECRET=$(retry 5 vault kv get -field value ${PACKAGE_UPLOADER_GCS_CREDENTIALS_PATH})
+    fi
+fi
+
+if [ "${BUILDKITE_PIPELINE_SLUG}" == "integrations" ]; then
+    # FIXME: update condition depending on the pipeline steps triggered
+    if [[ "${BUILDKITE_STEP_KEY}" == "test-integrations" ]]; then
+        export ELASTIC_PACKAGE_AWS_SECRET_KEY=$(retry 5 vault kv get -field secret_key ${AWS_SERVICE_ACCOUNT_SECRET_PATH})
+        export ELASTIC_PACKAGE_AWS_ACCESS_KEY=$(retry 5 vault kv get -field access_key ${AWS_SERVICE_ACCOUNT_SECRET_PATH})
+
+        export PRIVATE_CI_GCS_CREDENTIALS_SECRET=$(retry 5 vault kv get -field plaintext ${PRIVATE_CI_GCS_CREDENTIALS_PATH})
+        export JOB_GCS_BUCKET_INTERNAL="fleet-ci-temp-internal"
+
+        # Environment variables required by the service deployer
+        export AWS_SECRET_ACCESS_KEY=${ELASTIC_PACKAGE_AWS_SECRET_KEY}
+        export AWS_ACCESS_KEY_ID=${ELASTIC_PACKAGE_AWS_ACCESS_KEY}
+    fi
+fi
+
+if [ "${BUILDKITE_PIPELINE_SLUG}" == "integrations-serverless" ]; then
+    if [[ "${BUILDKITE_STEP_KEY}" == "test-integrations-serverless-project" ]]; then
+        export ELASTIC_PACKAGE_AWS_SECRET_KEY=$(retry 5 vault kv get -field secret_key ${AWS_SERVICE_ACCOUNT_SECRET_PATH})
+        export ELASTIC_PACKAGE_AWS_ACCESS_KEY=$(retry 5 vault kv get -field access_key ${AWS_SERVICE_ACCOUNT_SECRET_PATH})
+
+        export PRIVATE_CI_GCS_CREDENTIALS_SECRET=$(retry 5 vault kv get -field plaintext ${PRIVATE_CI_GCS_CREDENTIALS_PATH})
+        export JOB_GCS_BUCKET_INTERNAL="fleet-ci-temp-internal"
+
+        # Environment variables required by the service deployer
+        export AWS_SECRET_ACCESS_KEY=${ELASTIC_PACKAGE_AWS_SECRET_KEY}
+        export AWS_ACCESS_KEY_ID=${ELASTIC_PACKAGE_AWS_ACCESS_KEY}
+
+        export EC_API_KEY_SECRET=$(retry 5 vault kv get -field apiKey ${EC_TOKEN_PATH})
+        export EC_HOST_SECRET=$(retry 5 vault kv get -field url ${EC_TOKEN_PATH})
+        export EC_REGION_SECRET=$(retry 5 vault read -field region_qa ${EC_DATA_PATH})
+    fi
+fi
diff --git a/.buildkite/hooks/pre-exit b/.buildkite/hooks/pre-exit
new file mode 100755
index 00000000000..7952763d649
--- /dev/null
+++ b/.buildkite/hooks/pre-exit
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+source .buildkite/scripts/common.sh
+
+set -euo pipefail
+
+if [[ "$BUILDKITE_PIPELINE_SLUG" == "integrations" ]]; then
+    # FIXME: update condition depending on the pipeline steps triggered
+    if [[ "$BUILDKITE_STEP_KEY" == "test-integrations" ]]; then
+        unset ELASTIC_PACKAGE_AWS_ACCESS_KEY
+        unset ELASTIC_PACKAGE_AWS_SECRET_KEY
+        unset AWS_ACCESS_KEY_ID
+        unset AWS_SECRET_ACCESS_KEY
+
+        # Ensure that the kind cluster is deleted
+        delete_kind_cluster
+
+        # Ensure the Elastic stack is stopped
+        if [ -f "${ELASTIC_PACKAGE_BIN}" ]; then
+            echo "--- Take down the Elastic stack"
+            ${ELASTIC_PACKAGE_BIN} stack down -v
+        fi
+    fi
+fi
+
+if [[ "$BUILDKITE_PIPELINE_SLUG" == "integrations-serverless" ]]; then
+    if [[ "$BUILDKITE_STEP_KEY" == "test-integrations-serverless-project" ]]; then
+        unset ELASTIC_PACKAGE_AWS_ACCESS_KEY
+        unset ELASTIC_PACKAGE_AWS_SECRET_KEY
+        unset AWS_ACCESS_KEY_ID
+        unset AWS_SECRET_ACCESS_KEY
+
+        # Ensure that the kind cluster is deleted
+        delete_kind_cluster
+
+        # Ensure the Elastic stack is stopped
+        if [ -f "${ELASTIC_PACKAGE_BIN}" ]; then
+            echo "--- Take down the Elastic stack"
+            export EC_API_KEY=${EC_API_KEY_SECRET}
+            export EC_HOST=${EC_HOST_SECRET}
+
+            ${ELASTIC_PACKAGE_BIN} stack down -v
+
+            unset EC_API_KEY
+            unset EC_HOST
+        fi
+    fi
+fi
+
+unset_secrets
+cleanup
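Together the hooks implement a secret lifecycle: pre-command pulls credentials from Vault into `*_SECRET`/`*_TOKEN` variables, and pre-exit relies on `unset_secrets` (defined in common.sh below) to sweep them all away by naming convention. A condensed sketch of that lifecycle, using an illustrative secret path rather than one of the real `kv/ci-shared/platform-ingest` paths:

```bash
# Illustrative path and field; retry and unset_secrets come from common.sh.
export EXAMPLE_API_TOKEN="$(retry 5 vault kv get -field token kv/ci-shared/example/credentials)"

# ... build steps use EXAMPLE_API_TOKEN here ...

unset_secrets   # unsets every environment variable ending in _SECRET or _TOKEN
```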
diff --git a/.buildkite/pipeline.schedule-daily.yml b/.buildkite/pipeline.schedule-daily.yml
new file mode 100644
index 00000000000..f64001d85a0
--- /dev/null
+++ b/.buildkite/pipeline.schedule-daily.yml
@@ -0,0 +1,36 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json
+name: integrations-schedule-daily
+
+env:
+  SETUP_GVM_VERSION: "v0.5.1"
+  LINUX_AGENT_IMAGE: "golang:${GO_VERSION}"
+
+# The pipeline is triggered by the scheduler every day
+steps:
+  - label: ":white_check_mark: Check go sources"
+    key: "check"
+    command: ".buildkite/scripts/check_sources.sh"
+    agents:
+      image: "${LINUX_AGENT_IMAGE}"
+      cpu: "8"
+      memory: "4G"
+
+  - label: "Check integrations in serverless - project: Observability"
+    key: "trigger-integrations-serverless-obs"
+    trigger: "integrations-serverless"
+    build:
+      env:
+        SERVERLESS_PROJECT: observability
+    depends_on:
+      - step: "check"
+        allow_failure: false
+
+  - label: "Check integrations in serverless - project: Security"
+    key: "trigger-integrations-serverless-security"
+    trigger: "integrations-serverless"
+    build:
+      env:
+        SERVERLESS_PROJECT: security
+    depends_on:
+      - step: "check"
+        allow_failure: false
diff --git a/.buildkite/pipeline.serverless.yml b/.buildkite/pipeline.serverless.yml
new file mode 100644
index 00000000000..57b678a234f
--- /dev/null
+++ b/.buildkite/pipeline.serverless.yml
@@ -0,0 +1,73 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json
+
+env:
+  SETUP_GVM_VERSION: "v0.5.1"
+  LINUX_AGENT_IMAGE: "golang:${GO_VERSION}"
+  DOCKER_COMPOSE_VERSION: "v2.17.2"
+  KIND_VERSION: 'v0.20.0'
+  K8S_VERSION: 'v1.27.3'
+  YQ_VERSION: 'v4.35.2'
+  # Elastic package settings
+  # Manage docker output/logs
+  ELASTIC_PACKAGE_COMPOSE_DISABLE_ANSI: "true"
+  ELASTIC_PACKAGE_COMPOSE_DISABLE_PULL_PROGRESS_INFORMATION: "true"
+  # Default license to use by `elastic-package build`
+  ELASTIC_PACKAGE_REPOSITORY_LICENSE: "licenses/Elastic-2.0.txt"
+  # Link definitions path (full path to be set in the corresponding step)
+  ELASTIC_PACKAGE_LINKS_FILE_PATH: "links_table.yml"
+  # Disable comparison of results in pipeline tests to avoid errors related to GeoIP fields
+  ELASTIC_PACKAGE_SERVERLESS_PIPELINE_TEST_DISABLE_COMPARE_RESULTS: "true"
+
+steps:
+  - input: "Input values for the variables"
+    key: "input-variables"
+    fields:
+      - select: "SERVERLESS_PROJECT"
+        key: "SERVERLESS_PROJECT"
+        options:
+          - label: "observability"
+            value: "observability"
+          - label: "security"
+            value: "security"
+        default: "observability"
+    if: "build.source == 'ui'"
+
+  - wait: ~
+    if: "build.source == 'ui'"
+    allow_dependency_failure: false
+
+  - label: ":white_check_mark: Check go sources"
+    key: "check"
+    command: ".buildkite/scripts/check_sources.sh"
+    agents:
+      image: "${LINUX_AGENT_IMAGE}"
+      cpu: "8"
+      memory: "4G"
+
+  - label: "Check integrations in serverless"
+    key: "test-integrations-serverless-project"
+    command: ".buildkite/scripts/test_integrations_with_serverless.sh"
+    timeout_in_minutes: 120
+    env:
+      SERVERLESS: true
+      FORCE_CHECK_ALL: true
+      UPLOAD_SAFE_LOGS: 1
+    agents:
+      provider: "gcp"
+    artifact_paths:
+      - "build/test-results/*.xml"
+      # - "build/elastic-stack-dump/*/logs/*.log"
+      # - "build/elastic-stack-dump/*/logs/fleet-server-internal/**/*"
+    depends_on:
+      - step: "check"
+        allow_failure: false
+
+  - wait: ~
+    continue_on_failure: true
+
+  - label: ":junit: Junit annotate"
+    plugins:
+      - junit-annotate#v2.4.1:
+          artifacts: "build/test-results/*.xml"
+    agents:
+      provider: "gcp" # junit plugin requires docker
diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index 34321b61161..a9d78b8e6cb 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -1,5 +1,55 @@
 # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json
+env:
+  SETUP_GVM_VERSION: "v0.5.1"
+  LINUX_AGENT_IMAGE: "golang:${GO_VERSION}"
+  DOCKER_COMPOSE_VERSION: "v2.17.2"
+  KIND_VERSION: 'v0.20.0'
+  K8S_VERSION: 'v1.27.3'
+  YQ_VERSION: 'v4.35.2'
+  # Elastic package settings
+  # Manage docker output/logs
+  ELASTIC_PACKAGE_COMPOSE_DISABLE_ANSI: "true"
+  ELASTIC_PACKAGE_COMPOSE_DISABLE_PULL_PROGRESS_INFORMATION: "true"
+  # Default license to use by `elastic-package build`
+  ELASTIC_PACKAGE_REPOSITORY_LICENSE: "licenses/Elastic-2.0.txt"
+  # Link definitions path (full path to be set in the corresponding step)
+  ELASTIC_PACKAGE_LINKS_FILE_PATH: "links_table.yml"
+  # Disable comparison of results in pipeline tests to avoid errors related to GeoIP fields
+  ELASTIC_PACKAGE_SERVERLESS_PIPELINE_TEST_DISABLE_COMPARE_RESULTS: "true"
+
 steps:
-  - label: "Example test"
-    command: echo "Hello!"
+  - label: ":white_check_mark: Check go sources"
+    key: "check"
+    command: ".buildkite/scripts/check_sources.sh"
+    agents:
+      image: "${LINUX_AGENT_IMAGE}"
+      cpu: "8"
+      memory: "4G"
+
+  # TODO: Pending to migrate stages from https://github.com/elastic/integrations/blob/993537b80456edf2035f2a4826031841116c2019/.ci/Jenkinsfile
+  # - label: "Publish packages - INCOMPLETE"
+  #   key: "publish-packages"
+  #   command: ".buildkite/scripts/publish_packages.sh" # TODO: missing signature and publishing loops
+  #   agents:
+  #     provider: "gcp"
+  #   depends_on:
+  #     - step: "check"
+  #       allow_failure: false
+
+  # - label: "Trigger integrations"
+  #   key: "trigger-integrations"
+  #   command: ".buildkite/scripts/trigger_integrations_in_parallel.sh" # TODO: missing script
+  #   depends_on:
+  #     - step: "publish-packages"
+  #       allow_failure: false
+
+  # - wait: ~
+  #   continue_on_failure: true
+
+  # - label: ":junit: Junit annotate"
+  #   plugins:
+  #     - junit-annotate#v2.4.1:
+  #         artifacts: "build/test-results/*.xml"
+  #   agents:
+  #     provider: "gcp" # junit plugin requires docker
diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json
index f18e9c0a66a..404452152a0 100644
--- a/.buildkite/pull-requests.json
+++ b/.buildkite/pull-requests.json
@@ -1,20 +1,52 @@
 {
-    "jobs": [
-        {
-            "enabled": true,
-            "pipelineSlug": "integrations",
-            "allow_org_users": true,
-            "allowed_repo_permissions": ["admin", "write"],
-            "allowed_list": [ ],
-            "set_commit_status": true,
-            "build_on_commit": true,
-            "build_on_comment": true,
-            "trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))|^/test$",
-            "always_trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))|^/test$",
-            "skip_ci_labels": [ ],
-            "skip_target_branches": [ ],
-            "skip_ci_on_only_changed": [ ],
-            "always_require_ci_on_changed": [ ]
-        }
-    ]
+  "jobs": [
+    {
+      "enabled": true,
+      "pipelineSlug": "integrations",
+      "allow_org_users": true,
+      "allowed_repo_permissions": ["admin", "write"],
+      "allowed_list": ["dependabot[bot]", "mergify[bot]"],
+      "set_commit_status": true,
+      "build_on_commit": true,
+      "build_on_comment": true,
+      "trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))|^/test$",
+      "always_trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))|^/test$",
+      "skip_ci_labels": [],
+      "skip_target_branches": [],
+      "skip_ci_on_only_changed": [],
+      "always_require_ci_on_changed": []
+    },
+    {
+      "enabled": false,
+      "pipelineSlug": "integrations-schedule-daily",
+      "allow_org_users": true,
+      "allowed_repo_permissions": ["admin", "write"],
+      "allowed_list": [],
+      "set_commit_status": false,
+      "build_on_commit": false,
+      "build_on_comment": false,
+      "trigger_comment_regex": "",
+      "always_trigger_comment_regex": "",
+      "skip_ci_labels": [],
+      "skip_target_branches": [],
+      "skip_ci_on_only_changed": [],
+      "always_require_ci_on_changed": []
+    },
+    {
+      "enabled": false,
+      "pipelineSlug": "integrations-serverless",
+      "allow_org_users": true,
+      "allowed_repo_permissions": ["admin", "write"],
+      "allowed_list": [],
+      "set_commit_status": false,
+      "build_on_commit": false,
+      "build_on_comment": false,
+      "trigger_comment_regex": "",
+      "always_trigger_comment_regex": "",
+      "skip_ci_labels": [],
+      "skip_target_branches": [],
+      "skip_ci_on_only_changed": [],
+      "always_require_ci_on_changed": []
+    }
+  ]
 }
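The `trigger_comment_regex` is PCRE-style (non-capturing groups, `\W`). A quick way to sanity-check which PR comments would trigger a build, assuming GNU grep with `-P` support:

```bash
regex='^(?:(?:buildkite\W+)?(?:build|test)\W+(?:this|it))|^/test$'

echo "/test"               | grep -qP "${regex}" && echo "triggers"
echo "buildkite test this" | grep -qP "${regex}" && echo "triggers"
echo "build it"            | grep -qP "${regex}" && echo "triggers"
echo "please run tests"    | grep -qP "${regex}" || echo "does not trigger"
```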
"integrations-schedule-daily", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [], + "set_commit_status": false, + "build_on_commit": false, + "build_on_comment": false, + "trigger_comment_regex": "", + "always_trigger_comment_regex": "", + "skip_ci_labels": [], + "skip_target_branches": [], + "skip_ci_on_only_changed": [], + "always_require_ci_on_changed": [] + }, + { + "enabled": false, + "pipelineSlug": "integrations-serverless", + "allow_org_users": true, + "allowed_repo_permissions": ["admin", "write"], + "allowed_list": [], + "set_commit_status": false, + "build_on_commit": false, + "build_on_comment": false, + "trigger_comment_regex": "", + "always_trigger_comment_regex": "", + "skip_ci_labels": [], + "skip_target_branches": [], + "skip_ci_on_only_changed": [], + "always_require_ci_on_changed": [] + } + ] } diff --git a/.buildkite/scripts/check_sources.sh b/.buildkite/scripts/check_sources.sh new file mode 100755 index 00000000000..cb1261a1616 --- /dev/null +++ b/.buildkite/scripts/check_sources.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +source .buildkite/scripts/common.sh + +set -euo pipefail + +add_bin_path +with_mage + +mage -debug check + +check_git_diff diff --git a/.buildkite/scripts/common.sh b/.buildkite/scripts/common.sh new file mode 100755 index 00000000000..30f72681c5e --- /dev/null +++ b/.buildkite/scripts/common.sh @@ -0,0 +1,609 @@ +#!/bin/bash + +set -euo pipefail + +WORKSPACE="$(pwd)" +BIN_FOLDER="${WORKSPACE}/bin" +platform_type="$(uname)" +hw_type="$(uname -m)" +platform_type_lowercase="${platform_type,,}" + +GOOGLE_CREDENTIALS_FILENAME="google-cloud-credentials.json" +export ELASTIC_PACKAGE_BIN=${WORKSPACE}/build/elastic-package + +running_on_buildkite() { + if [[ "${BUILDKITE:-"false"}" == "true" ]]; then + return 0 + fi + return 1 +} + +retry() { + local retries=$1 + shift + local count=0 + until "$@"; do + exit=$? + wait=$((2 ** count)) + count=$((count + 1)) + if [ $count -lt "$retries" ]; then + >&2 echo "Retry $count/$retries exited $exit, retrying in $wait seconds..." + sleep $wait + else + >&2 echo "Retry $count/$retries exited $exit, no more retries left." + return $exit + fi + done + return 0 +} + +cleanup() { + echo "Deleting temporary files..." + rm -rf ${WORKSPACE}/${TMP_FOLDER_TEMPLATE_BASE}.* + echo "Done." +} + +unset_secrets () { + for var in $(printenv | sed 's;=.*;;' | sort); do + if [[ "$var" == *_SECRET || "$var" == *_TOKEN ]]; then + unset "$var" + fi + done +} + +repo_name() { + # Example of URL: git@github.com:acme-inc/my-project.git + local repoUrl=$1 + + orgAndRepo=$(echo $repoUrl | cut -d':' -f 2) + echo "$(basename ${orgAndRepo} .git)" +} + +check_platform_architecture() { + case "${hw_type}" in + "x86_64") + arch_type="amd64" + ;; + "aarch64") + arch_type="arm64" + ;; + "arm64") + arch_type="arm64" + ;; + *) + echo "The current platform/OS type is unsupported yet" + ;; + esac +} + +# Helpers to install required tools +create_bin_folder() { + mkdir -p ${BIN_FOLDER} +} + +add_bin_path() { + create_bin_folder + echo "Adding PATH to the environment variables..." + export PATH="${BIN_FOLDER}:${PATH}" # TODO: set bin folder after PATH +} + +with_go() { + create_bin_folder + echo "--- Setting up the Go environment..." 
+ check_platform_architecture + echo " GVM ${SETUP_GVM_VERSION} (platform ${platform_type_lowercase} arch ${arch_type}" + retry 5 curl -sL -o ${BIN_FOLDER}/gvm "https://github.com/andrewkroh/gvm/releases/download/${SETUP_GVM_VERSION}/gvm-${platform_type_lowercase}-${arch_type}" + chmod +x ${BIN_FOLDER}/gvm + eval "$(gvm $(cat .go-version))" + go version + which go + export PATH="${PATH}:$(go env GOPATH):$(go env GOPATH)/bin" +} + +with_mage() { + create_bin_folder + with_go + + local install_packages=( + "github.com/magefile/mage" + "github.com/elastic/go-licenser" + "golang.org/x/tools/cmd/goimports" + "github.com/jstemmer/go-junit-report" + "gotest.tools/gotestsum" + ) + for pkg in "${install_packages[@]}"; do + go install "${pkg}@latest" + done + mage --version +} + +with_docker_compose() { + create_bin_folder + check_platform_architecture + + echo "--- Setting up the Docker-compose environment..." + retry 5 curl -sSL -o ${BIN_FOLDER}/docker-compose "https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-${platform_type_lowercase}-${hw_type}" + chmod +x ${BIN_FOLDER}/docker-compose + docker-compose version +} + +with_kubernetes() { + create_bin_folder + check_platform_architecture + + echo "--- Install kind" + retry 5 curl -sSLo ${BIN_FOLDER}/kind "https://github.com/kubernetes-sigs/kind/releases/download/${KIND_VERSION}/kind-${platform_type_lowercase}-${arch_type}" + chmod +x ${BIN_FOLDER}/kind + kind version + which kind + + echo "--- Install kubectl" + retry 5 curl -sSLo ${BIN_FOLDER}/kubectl "https://storage.googleapis.com/kubernetes-release/release/${K8S_VERSION}/bin/${platform_type_lowercase}/${arch_type}/kubectl" + chmod +x ${BIN_FOLDER}/kubectl + kubectl version --client + which kubectl +} + +with_yq() { + check_platform_architecture + local binary="yq_${platform_type_lowercase}_${arch_type}" + + retry 5 curl -sSL -o ${BIN_FOLDER}/yq.tar.gz "https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/${binary}.tar.gz" + + tar -C ${BIN_FOLDER} -xpf ${BIN_FOLDER}/yq.tar.gz ./${binary} + + mv ${BIN_FOLDER}/${binary} ${BIN_FOLDER}/yq + chmod +x ${BIN_FOLDER}/yq + yq --version + + rm -rf ${BIN_FOLDER}/yq.tar.gz +} + +## Logging and logout from Google Cloud +google_cloud_upload_auth() { + local secretFileLocation=$(mktemp -d -p "${WORKSPACE}" -t "${TMP_FOLDER_TEMPLATE_BASE}.XXXXXXXXX")/${GOOGLE_CREDENTIALS_FILENAME} + echo "${PRIVATE_INFRA_GCS_CREDENTIALS_SECRET}" > ${secretFileLocation} + gcloud auth activate-service-account --key-file ${secretFileLocation} 2> /dev/null + export GOOGLE_APPLICATION_CREDENTIALS=${secretFileLocation} +} + +google_cloud_signing_auth() { + local secretFileLocation=$(mktemp -d -p "${WORKSPACE}" -t "${TMP_FOLDER_TEMPLATE_BASE}.XXXXXXXXX")/${GOOGLE_CREDENTIALS_FILENAME} + echo "${SIGNING_PACKAGES_GCS_CREDENTIALS_SECRET}" > ${secretFileLocation} + gcloud auth activate-service-account --key-file ${secretFileLocation} 2> /dev/null + export GOOGLE_APPLICATION_CREDENTIALS=${secretFileLocation} +} + +google_cloud_auth_safe_logs() { + local gsUtilLocation=$(mktemp -d -p ${WORKSPACE} -t ${TMP_FOLDER_TEMPLATE}) + local secretFileLocation=${gsUtilLocation}/${GOOGLE_CREDENTIALS_FILENAME} + + echo "${PRIVATE_CI_GCS_CREDENTIALS_SECRET}" > ${secretFileLocation} + + gcloud auth activate-service-account --key-file ${secretFileLocation} 2> /dev/null + export GOOGLE_APPLICATION_CREDENTIALS=${secretFileLocation} +} + +google_cloud_logout_active_account() { + local active_account=$(gcloud auth list --filter=status:ACTIVE 
--format="value(account)" 2>/dev/null) + if [[ -n "$active_account" && -n "${GOOGLE_APPLICATION_CREDENTIALS+x}" ]]; then + echo "Logging out from GCP for active account" + gcloud auth revoke $active_account > /dev/null 2>&1 + else + echo "No active GCP accounts found." + fi + + if [ -n "${GOOGLE_APPLICATION_CREDENTIALS+x}" ]; then + rm -rf ${GOOGLE_APPLICATION_CREDENTIALS} + unset GOOGLE_APPLICATION_CREDENTIALS + fi +} + +## Helpers for integrations pipelines +check_git_diff() { + cd ${WORKSPACE} + echo "git update-index" + git update-index --refresh + echo "git diff-index" + git diff-index --exit-code HEAD -- +} + +use_elastic_package() { + echo "--- Installing elastic-package" + mkdir -p build + go build -o ${ELASTIC_PACKAGE_BIN} github.com/elastic/elastic-package +} + +is_already_published() { + local packageZip=$1 + + if curl -s --head https://package-storage.elastic.co/artifacts/packages/${packageZip} | grep -q "HTTP/2 200" ; then + echo "- Already published ${packageZip}" + return 0 + fi + echo "- Not published ${packageZip}" + return 1 +} + +create_kind_cluster() { + echo "--- Create kind cluster" + kind create cluster --config ${WORKSPACE}/kind-config.yaml --image kindest/node:${K8S_VERSION} +} + + +delete_kind_cluster() { + echo "--- Delete kind cluster" + kind delete cluster || true +} + +kibana_version_manifest() { + local kibana_version=$(cat manifest.yml | yq ".conditions.kibana.version") + if [ $kibana_version != "null" ]; then + echo "${kibana_version}" + return + fi + + kibana_version=$(cat manifest.yml | yq ".conditions.\"kibana.version\"") + if [ $kibana_version != "null" ]; then + echo "${kibana_version}" + return + fi + + echo "null" +} + +capabilities_manifest() { + cat manifest.yml | yq ".conditions.elastic.capabilities" +} + +is_supported_capability() { + if [ "${SERVERLESS_PROJECT}" == "" ]; then + return 0 + fi + + local capabilities=$(capabilities_manifest) + + # if no capabilities defined, it is available iavailable all projects + if [[ "${capabilities}" == "null" ]]; then + return 0 + fi + + if [[ ${SERVERLESS_PROJECT} == "observability" ]]; then + if echo ${capabilities} |egrep 'apm|observability|uptime' ; then + return 0 + else + return 1 + fi + fi + + if [[ ${SERVERLESS_PROJECT} == "security" ]]; then + if echo ${capabilities} |egrep 'security' ; then + return 0 + else + return 1 + fi + fi + + return 1 +} + +is_supported_stack() { + if [ "${STACK_VERSION}" == "" ]; then + return 0 + fi + + local kibana_version=$(kibana_version_manifest) + if [ "${kibana_version}" == "null" ]; then + return 0 + fi + if [[ ! ${kibana_version} =~ \^7\. && ${STACK_VERSION} =~ ^7\. ]]; then + return 1 + fi + if [[ ! ${kibana_version} =~ \^8\. && ${STACK_VERSION} =~ ^8\. 
]]; then + return 1 + fi + return 0 +} + +oldest_supported_version() { + local kibana_version=$(cat manifest.yml | yq ".conditions.kibana.version") + if [ $kibana_version != "null" ]; then + python3 .buildkite/scripts/find_oldest_supported_version --manifest manifest.yml + return + fi + + kibana_version=$(cat manifest.yml | yq ".conditions.\"kibana.version\"") + if [ $kibana_version != "null" ]; then + python3 .buildkite/scripts/find_oldest_supported_version --manifest manifest.yml + return + fi + + echo "null" +} + +create_elastic_package_profile() { + local name="$1" + ${ELASTIC_PACKAGE_BIN} profiles create "${name}" + ${ELASTIC_PACKAGE_BIN} profiles use "${name}" +} + +prepare_serverless_stack() { + echo "--- Prepare serverless stack" + + local args="-v" + if [ -n "${STACK_VERSION}" ]; then + args="${args} --version ${STACK_VERSION}" + fi + + # Currently, if STACK_VERSION is not defined, for serverless it will be + # used as Elastic stack version (for agents) the default version in elastic-package + + # Creating a new profile allow to set a specific name for the serverless project + create_elastic_package_profile "integrations-${BUILDKITE_PULL_REQUEST}-${BUILDKITE_BUILD_NUMBER}-${SERVERLESS_PROJECT}" + + export EC_API_KEY=${EC_API_KEY_SECRET} + export EC_HOST=${EC_HOST_SECRET} + + echo "Boot up the Elastic stack" + ${ELASTIC_PACKAGE_BIN} stack up \ + -d \ + ${args} \ + --provider serverless \ + -U stack.serverless.region=${EC_REGION_SECRET},stack.serverless.type=${SERVERLESS_PROJECT} 2>&1 | egrep -v "^Password: " # To remove password from the output + echo "" + ${ELASTIC_PACKAGE_BIN} stack status + echo "" +} + +is_spec_3_0_0() { + local pkg_spec=$(cat manifest.yml | yq '.format_version') + local major_version=$(echo $pkg_spec | cut -d '.' -f 1) + + if [ ${major_version} -ge 3 ]; then + return 0 + fi + return 1 +} + +get_from_changeset() { + if [ "${BUILDKITE_PULL_REQUEST_BASE_BRANCH}" != "false" ]; then + # pull request + echo "origin/${BUILDKITE_PULL_REQUEST_BASE_BRANCH}" + return + fi + # main or backport branches + previous_commit=$(git rev-parse --verify FETCH_HEAD~1) + echo "${previous_commit}" +} + +get_to_changeset() { + echo "${BUILDKITE_COMMIT}" +} + +# TODO: it is required to have GIT_PREVIOUS_COMMIT and GIT_PREVIOUS_SUCCESSFUL_COMMIT +# as in Jenkins to set the right from (changesets) +is_pr_affected() { + local package="${1}" + + if ! is_supported_stack ; then + echo "[${package}] PR is not affected: unsupported stack (${STACK_VERSION})" + return 1 + fi + + if ! is_supported_capability ; then + echo "[${package}] PR is not affected: capabilities not mached with the project (${SERVERLESS_PROJECT})" + return 1 + fi + + if [[ ${FORCE_CHECK_ALL} == "true" ]];then + echo "[${package}] PR is affected: \"force_check_all\" parameter enabled" + return 0 + fi + + # setting default values for a PR + # TODO: get previous built commit as in Jenkins (groovy) + # def from = env.CHANGE_TARGET?.trim() ? "origin/${env.CHANGE_TARGET}" : "${env.GIT_PREVIOUS_COMMIT?.trim() ? env.GIT_PREVIOUS_COMMIT : env.GIT_BASE_COMMIT}" + local from="$(get_from_changeset)" + local to="$(get_to_changeset)" + + # TODO: If running for an integration branch (main, backport-*) check with + # GIT_PREVIOUS_SUCCESSFUL_COMMIT to check if the branch is still healthy. + # If this value is not available, check with last commit. 
+ if [[ ${BUILDKITE_BRANCH} == "main" || ${BUILDKITE_BRANCH} =~ ^backport- ]]; then + echo "[${package}] PR is affected: running on ${BUILDKITE_BRANCH} branch" + # TODO: get previous successful commit as in Jenkins (groovy) + # from = env.GIT_PREVIOUS_SUCCESSFUL_COMMIT?.trim() ? env.GIT_PREVIOUS_SUCCESSFUL_COMMIT : "origin/${env.BRANCH_NAME}^" + from="origin/${BUILDKITE_BRANCH}^" + to="origin/${BUILDKITE_BRANCH}" + fi + + echo "[${package}] git-diff: check non-package files" + if git diff --name-only $(git merge-base ${from} ${to}) ${to} | egrep '^(packages/|.github/CODEOWNERS)' ; then + echo "[${package}] PR is affected: found non-package files" + return 0 + fi + echo "[${package}] git-diff: check package files" + if git diff --name-only $(git merge-base ${from} ${to}) ${to} | egrep '^packages/${package}/' ; then + echo "[${package}] PR is affected: found package files" + return 0 + fi + echo "[${package}] PR is not affected" + return 1 +} + +is_pr() { + if [[ "${BUILDKITE_PULL_REQUEST}" == "false" || "${BUILDKITE_TAG}" == "" ]]; then + return 0 + fi + return 1 +} + +kubernetes_service_deployer_used() { + find . -type d | egrep '_dev/deploy/k8s$' +} + +teardown_serverless_test_package() { + local package=$1 + local build_directory="${WORKSPACE}/build" + local dump_directory="${build_directory}/elastic-stack-dump/${package}" + + echo "Collect Elastic stack logs" + ${ELASTIC_PACKAGE_BIN} stack dump -v --output ${dump_directory} + + upload_safe_logs_from_package ${package} ${build_directory} +} + +teardown_test_package() { + local package=$1 + local build_directory="${WORKSPACE}/build" + local dump_directory="${build_directory}/elastic-stack-dump/${package}" + + echo "Collect Elastic stack logs" + ${ELASTIC_PACKAGE_BIN} stack dump -v --output ${dump_directory} + + upload_safe_logs_from_package ${package} ${build_directory} + + echo "Take down the Elastic stack" + ${ELASTIC_PACKAGE_BIN} stack down -v +} + +list_all_directories() { + find . -maxdepth 1 -mindepth 1 -type d | xargs -I {} basename {} | sort # |egrep "netskope|logstash|ti_rapid7" +} + +check_package() { + local package=$1 + echo "Check package: ${package}" + if ! ${ELASTIC_PACKAGE_BIN} check -v ; then + return 1 + fi + echo "" + return 0 +} + +install_package() { + local package=$1 + echo "Install package: ${package}" + if ! ${ELASTIC_PACKAGE_BIN} install -v ; then + return 1 + fi + echo "" + return 0 +} + +# Currently, system tests are not run in serverless to avoid lasting the build +# too much time, since all packages are run in the same step one by one. +# Packages are tested one by one to avoid creating more than 100 projects for one build. +test_package_in_serverless() { + local package=$1 + TEST_OPTIONS="-v --report-format xUnit --report-output file" + + echo "Test package: ${package}" + if ! ${ELASTIC_PACKAGE_BIN} test asset ${TEST_OPTIONS} --test-coverage ; then + return 1 + fi + if ! ${ELASTIC_PACKAGE_BIN} test static ${TEST_OPTIONS} --test-coverage ; then + return 1 + fi + # FIXME: adding test-coverage for serverless results in errors like this: + # Error: error running package pipeline tests: could not complete test run: error calculating pipeline coverage: error fetching pipeline stats for code coverage calculations: need exactly one ES node in stats response (got 4) + if ! ${ELASTIC_PACKAGE_BIN} test pipeline ${TEST_OPTIONS} ; then + return 1 + fi + echo "" + return 0 +} + +run_tests_package() { + local package=$1 + if ! check_package ${package} ; then + return 1 + fi + if ! 
install_package ${package} ; then + return 1 + fi + if [[ $SERVERLESS == "true" ]]; then + if ! test_package_in_serverless ${package} ; then + return 1 + fi + fi + + # TODO add if block to test packages locally as Jenkins performs + return 0 +} + +create_collapsed_annotation() { + local title="$1" + local file="$2" + local style="$3" + local context="$4" + + local annotation_file="tmp.annotation.md" + echo "
${title}" >> ${annotation_file} + echo -e "\n\n" >> ${annotation_file} + cat ${file} >> ${annotation_file} + echo "
" >> ${annotation_file} + + cat ${annotation_file} | buildkite-agent annotate --style "${style}" --context "${context}" + + rm -f ${annotation_file} +} + +upload_safe_logs() { + local bucket="$1" + local source="$2" + local target="$3" + + if ! ls ${source} 2>&1 > /dev/null ; then + echo "upload_safe_logs: artifacts files not found, nothing will be archived" + return + fi + + google_cloud_auth_safe_logs + + gsutil cp ${source} "gs://${bucket}/buildkite/${REPO_BUILD_TAG}/${target}" + + google_cloud_logout_active_account +} + +buildkite_pr_branch_build_id() { + if [ "${BUILDKITE_PULL_REQUEST}" == "false" ]; then + # add pipeline slug ad build_id to distinguish between integration and integrations-serverless builds + # when both are executed using main branch + echo "${BUILDKITE_BRANCH}-${BUILDKITE_PIPELINE_SLUG}-${BUILDKITE_BUILD_NUMBER}" + return + fi + echo "PR-${BUILDKITE_PULL_REQUEST}-${BUILDKITE_BUILD_NUMBER}" +} + +clean_safe_logs() { + rm -rf ${WORKSPACE}/build/elastic-stack-dump + rm -rf ${WORKSPACE}/build/container-logs +} + +upload_safe_logs_from_package() { + if [[ "${UPLOAD_SAFE_LOGS}" -eq 0 ]] ; then + return + fi + + local package=$1 + local build_directory=$2 + + local parent_folder="insecure-logs" + + upload_safe_logs \ + "${JOB_GCS_BUCKET_INTERNAL}" \ + "${build_directory}/elastic-stack-dump/${package}/logs/elastic-agent-internal/*.*" \ + "${parent_folder}/${package}/elastic-agent-logs/" + + # required for <8.6.0 + upload_safe_logs \ + "${JOB_GCS_BUCKET_INTERNAL}" \ + "${build_directory}/elastic-stack-dump/${package}/logs/elastic-agent-internal/default/*" \ + "${parent_folder}/${package}/elastic-agent-logs/default/" + + upload_safe_logs \ + "${JOB_GCS_BUCKET_INTERNAL}" \ + "${build_directory}/container-logs/*.log" \ + "${parent_folder}/${package}/container-logs/" +} diff --git a/.buildkite/scripts/find_oldest_supported_version.py b/.buildkite/scripts/find_oldest_supported_version.py new file mode 100644 index 00000000000..3a7be4d04b4 --- /dev/null +++ b/.buildkite/scripts/find_oldest_supported_version.py @@ -0,0 +1,272 @@ +#!/bin/env python3 +import argparse +import requests +import sys +import yaml +import unittest + +VERSION_URL = "https://artifacts-api.elastic.co/v1/versions?x-elastic-no-kpi=true" + + +def fetch_version(): + return requests.get(VERSION_URL).json() + + +def find_oldest_supported_version(kibana_version_condition: str) -> str: + # The logic of this function is copied from https://github.com/elastic/apm-pipeline-library/blob/main/vars/findOldestSupportedVersion.groovy + + if "||" in kibana_version_condition and kibana_version_condition.index("||") >= 0: + return handle_or(kibana_version_condition) + + available_versions = fetch_version() + version = remove_operator(kibana_version_condition) + parts = version.split(".") + + # If this is specifying a major or a minor only, check with the zero version. + while len(parts) < 3: + version += ".0" + parts.append("0") + + major, minor, patch = parts[0], parts[1], parts[2] + + # Use the snapshot if this is the last patch version. + next_patch = ".".join((major, minor, str(int(patch)+1))) + next_patch_exists = ( + next_patch in available_versions.get("versions", []) or + f"{next_patch}-SNAPSHOT" in available_versions.get("versions", []) + ) + + snapshot_version = f"{version}-SNAPSHOT" + if not next_patch_exists and (snapshot_version in available_versions.get("versions", [])): + return snapshot_version + + # Use the version as is if it exists. 
+ if version in available_versions.get("version", []): + return version + + # Old minors may not be available in artifacts-api, if it is older + # than the others in the same major, return the version as is. + older = True + for available_version in available_versions.get("versions", []): + available_parts = available_version.split(".") + if len(available_parts) < 2: + continue + + available_major = available_parts[0] + available_minor = available_parts[1] + if major == available_major and minor > available_minor: + older = False + break + if older: + return version + + # If no version has been found so far, try with the snapshot of the next version + # in the current major. + major_snapshot = f"{major}.x-SNAPSHOT" + if major_snapshot in available_versions.get("aliases", []): + return major_snapshot + + # Otherwise, return it, whatever this is. + return version + + +def remove_operator(kibana_version_condition: str) -> str: + if kibana_version_condition[0].isdigit(): + return kibana_version_condition + elif kibana_version_condition.startswith("^") or kibana_version_condition.startswith("~"): + return kibana_version_condition[1:] + elif kibana_version_condition.startswith(">="): + return kibana_version_condition[2:] + raise Exception("kibana version condition supports only ^, ~ and >= operators") + + +def handle_or(kibana_version_condition: str): + if "||" not in kibana_version_condition: + raise Exception(f"no conditions found in '{kibana_version_condition}'") + + conditions = kibana_version_condition.split("||") + result = "" + for cond in conditions: + candidate = find_oldest_supported_version(cond) + if result == "" or candidate < result: + result = candidate + + return result + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Prepare Elastic stack") + parser.add_argument("--manifest-path", + required=False, + default="manifest.yml", + help="path of manifest file") + parser.add_argument("--test", + required=False, + action="store_true", + default=False, + help="trigger test") + + args, unknown = parser.parse_known_args() + # Set this for unittest + sys.argv[1:] = unknown + return args + + +def run(cfg: argparse.Namespace): + with open(cfg.manifest_path, "r") as src: + manifest_doc = yaml.safe_load(src) + + kibana_version_condition = "" + if "kibana.version" in manifest_doc["conditions"]: + kibana_version_condition = manifest_doc["conditions"]["kibana.version"] + elif "kibana" in manifest_doc["conditions"]: + kibana_version_condition = manifest_doc["conditions"]["kibana"]["version"] + + if kibana_version_condition: + print(find_oldest_supported_version(kibana_version_condition), end="") + else: + print("null") + + +def main(): + cfg = parse_args() + + if cfg.test: + unittest.main() + else: + run(cfg) + + +# Test: meant to run locally with --test + +class TestFindOldestSupportVersion(unittest.TestCase): + """Testcase for find_oldest_supported_version.""" + + mock_data = { + "versions": [ + "7.17.10", + "7.17.11", + "7.17.12", + "7.17.13-SNAPSHOT", + "7.17.13", + "7.17.14-SNAPSHOT", + "8.7.0", + "8.7.1", + "8.8.0", + "8.8.1", + "8.8.2", + "8.9.0", + "8.9.1-SNAPSHOT", + "8.9.1", + "8.9.2-SNAPSHOT", + "8.9.2", + "8.10.0-SNAPSHOT", + "8.10.0", + "8.10.1-SNAPSHOT", + "8.11.0-SNAPSHOT" + ], + "aliases": [ + "7.17-SNAPSHOT", + "7.17", + "8.7", + "8.8", + "8.9-SNAPSHOT", + "8.9", + "8.10-SNAPSHOT", + "8.10", + "8.11-SNAPSHOT" + ], + "manifests": { + "last-update-time": "Thu, 14 Sep 2023 16:03:46 UTC", + "seconds-since-last-update": 107 + } + } + + def 
setUp(self): + super().setUp() + global fetch_version + self._fetch_version = fetch_version + def fetch_version(): return self.mock_data + + def tearDown(self): + global fetch_version + fetch_version = self._fetch_version + super().tearDown() + + def test_next_patch_does_not_exits_and_available_version_contains_snapshot(self): + self.assertEqual(find_oldest_supported_version("7.17.14"), "7.17.14-SNAPSHOT") + self.assertEqual(find_oldest_supported_version("8.9.2"), "8.9.2-SNAPSHOT") + self.assertEqual(find_oldest_supported_version("8.10.1"), "8.10.1-SNAPSHOT") + self.assertEqual(find_oldest_supported_version("8.11.0"), "8.11.0-SNAPSHOT") + + def test_available_version_contains_kibana_version(self): + self.assertEqual(find_oldest_supported_version("7.17.10"), "7.17.10") + self.assertEqual(find_oldest_supported_version("7.17.11"), "7.17.11") + self.assertEqual(find_oldest_supported_version("7.17.12"), "7.17.12") + self.assertEqual(find_oldest_supported_version("7.17.13"), "7.17.13") + self.assertEqual(find_oldest_supported_version("8.7.1"), "8.7.1") + self.assertEqual(find_oldest_supported_version("8.10.0"), "8.10.0") + + def test_too_old_to_be_in_api(self): + self.assertEqual(find_oldest_supported_version("7.16.0"), "7.16.0") + self.assertEqual(find_oldest_supported_version("8.6.0"), "8.6.0") + + def test_or(self): + self.assertEqual(find_oldest_supported_version("8.6.0||8.7.0"), "8.6.0") + self.assertEqual(find_oldest_supported_version("8.9.2||8.9.1||7.17.14"), "7.17.14-SNAPSHOT") + + def test_mix(self): + self.assertEqual(find_oldest_supported_version("^8.6.0||~8.7.0"), "8.6.0") + self.assertEqual(find_oldest_supported_version("8.9.2||8.9.1||7.17.14"), "7.17.14-SNAPSHOT") + self.assertEqual(find_oldest_supported_version( + "~8.9.2||>=8.11.0||7.17.14"), "7.17.14-SNAPSHOT") + + +class TestRemoveOperator(unittest.TestCase): + """Testcase for remove_operator.""" + + def test_no_operator(self): + self.assertEqual(remove_operator("1.0.0"), "1.0.0") + + def test_circumflex(self): + self.assertEqual(remove_operator("^1.0.0"), "1.0.0") + + def test_tilda(self): + self.assertEqual(remove_operator("~1.0.0"), "1.0.0") + + def test_greater_or_equal(self): + self.assertEqual(remove_operator(">=1.0.0"), "1.0.0") + + def test_unknown(self): + with self.assertRaises(Exception): + remove_operator("<=1.0.0") + with self.assertRaises(Exception): + remove_operator("==1.0.0") + + +class TestHandleOr(unittest.TestCase): + """Testcase for handle_or.""" + + def test_single_condition(self): + with self.assertRaises(Exception): + handle_or("0.0.1") + + def test_happy_path(self): + # Mock temporarly with the identiy function. 
+ global find_oldest_supported_version + old_func = find_oldest_supported_version + def find_oldest_supported_version(x): return x + + self.assertEqual(handle_or("0.1||0.2"), "0.1") + self.assertEqual(handle_or("0.2||0.2.1||0.2.3-alpha"), "0.2") + self.assertEqual(handle_or("1.2||1.2.0||3.2.3-alpha"), "1.2") + + # restore mock + find_oldest_supported_version = old_func + + +if __name__ == "__main__": + try: + main() + except KeyboardInterrupt: + pass diff --git a/.buildkite/scripts/publish_packages.sh b/.buildkite/scripts/publish_packages.sh new file mode 100755 index 00000000000..241781796a3 --- /dev/null +++ b/.buildkite/scripts/publish_packages.sh @@ -0,0 +1,119 @@ +#!/bin/bash + +source .buildkite/scripts/common.sh +set -euo pipefail + +if [ ${SKIP_PUBLISHING:-"false"} == "true" ] ; then + echo "packageStoragePublish: skipping because skip_publishing param is ${SKIP_PUBLISHING}" + exit 0 +fi + +export BUILD_TAG="buildkite-${BUILDKITE_PIPELINE_SLUG}-${BUILDKITE_BUILD_NUMBER}" +export REPO_BUILD_TAG="${REPO_NAME}/${BUILD_TAG}" + +JENKINS_TRIGGER_PATH=".buildkite/scripts/triggerJenkinsJob" + +# signing +INFRA_SIGNING_BUCKET_NAME='internal-ci-artifacts' +INFRA_SIGNING_BUCKET_SIGNED_ARTIFACTS_SUBFOLDER="${REPO_BUILD_TAG}/signed-artifacts" +INFRA_SIGNING_BUCKET_ARTIFACTS_PATH="gs://${INFRA_SIGNING_BUCKET_NAME}/${REPO_BUILD_TAG}" +INFRA_SIGNING_BUCKET_SIGNED_ARTIFACTS_PATH="gs://${INFRA_SIGNING_BUCKET_NAME}/${INFRA_SIGNING_BUCKET_SIGNED_ARTIFACTS_SUBFOLDER}" + + +skipPublishing() { + if [[ "${BUILDKITE_PULL_REQUEST}" != "false" ]]; then + return 0 + fi + + if [[ "${BUILDKITE_BRANCH}" == "main" ]]; then + return 1 + fi + if [[ "${BUILDKITE_BRANCH}" =~ ^backport- ]]; then + return 1 + fi + + return 0 +} + +check_and_build_package() { + ${ELASTIC_PACKAGE_BIN} check + ${ELASTIC_PACKAGE_BIN} build --zip +} + +report_build_failure() { + local integration="${1}" + echo "Build package ${integration}failed" + + # if running in Buildkite , add an annotation + if [ -n "$BUILDKITE_BRANCH" ]; then + buildkite-agent annotate "Build package ${integration} failed" --style "warning" + fi +} + +build_packages() { + pushd packages > /dev/null + + for it in $(find . -maxdepth 1 -mindepth 1 -type d); do + integration=$(basename ${it}) + echo "Package ${integration}: check" + + pushd ${integration} > /dev/null + + version=$(cat manifest.yml | yq .version) + name=$(cat manifest.yml | yq .name) + + package_zip="${name}-${version}.zip" + + if is_already_published ${package_zip} ; then + echo "Skipping. ${package_zip} already published" + popd > /dev/null + continue + fi + + echo "Build integration as zip: ${integration}" + check_and_build_package || report_build_failure ${integration} + popd > /dev/null + + unpublished="true" + done + popd > /dev/null +} + +sign_packages() { + echo "Signing packages" + # TODO require signing: to be based on elastic-package +} + +publish_packages() { + echo "Publishing packages" + # TODO require publishing: to be based on elastic-package +} + +if skipPublishing ; then + echo "packageStoragePublish: not the main branch or a backport branch, nothing will be published" + exit 0 +fi + +echo "Checking gsutil command..." +if ! 
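The script prints a single version string (or `null`) on stdout and doubles as its own test suite, so it can be exercised locally:

```bash
# From a package directory containing a manifest.yml:
python3 .buildkite/scripts/find_oldest_supported_version.py --manifest-path manifest.yml

# Run the embedded unit tests (no network access needed; fetch_version is mocked):
python3 .buildkite/scripts/find_oldest_supported_version.py --test
```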
diff --git a/.buildkite/scripts/publish_packages.sh b/.buildkite/scripts/publish_packages.sh
new file mode 100755
index 00000000000..241781796a3
--- /dev/null
+++ b/.buildkite/scripts/publish_packages.sh
@@ -0,0 +1,119 @@
+#!/bin/bash
+
+source .buildkite/scripts/common.sh
+set -euo pipefail
+
+if [ "${SKIP_PUBLISHING:-"false"}" == "true" ] ; then
+    echo "packageStoragePublish: skipping because the skip_publishing param is ${SKIP_PUBLISHING}"
+    exit 0
+fi
+
+export BUILD_TAG="buildkite-${BUILDKITE_PIPELINE_SLUG}-${BUILDKITE_BUILD_NUMBER}"
+export REPO_BUILD_TAG="${REPO_NAME}/${BUILD_TAG}"
+
+JENKINS_TRIGGER_PATH=".buildkite/scripts/triggerJenkinsJob"
+
+# signing
+INFRA_SIGNING_BUCKET_NAME='internal-ci-artifacts'
+INFRA_SIGNING_BUCKET_SIGNED_ARTIFACTS_SUBFOLDER="${REPO_BUILD_TAG}/signed-artifacts"
+INFRA_SIGNING_BUCKET_ARTIFACTS_PATH="gs://${INFRA_SIGNING_BUCKET_NAME}/${REPO_BUILD_TAG}"
+INFRA_SIGNING_BUCKET_SIGNED_ARTIFACTS_PATH="gs://${INFRA_SIGNING_BUCKET_NAME}/${INFRA_SIGNING_BUCKET_SIGNED_ARTIFACTS_SUBFOLDER}"
+
+skipPublishing() {
+    if [[ "${BUILDKITE_PULL_REQUEST}" != "false" ]]; then
+        return 0
+    fi
+
+    if [[ "${BUILDKITE_BRANCH}" == "main" ]]; then
+        return 1
+    fi
+    if [[ "${BUILDKITE_BRANCH}" =~ ^backport- ]]; then
+        return 1
+    fi
+
+    return 0
+}
+
+check_and_build_package() {
+    ${ELASTIC_PACKAGE_BIN} check
+    ${ELASTIC_PACKAGE_BIN} build --zip
+}
+
+report_build_failure() {
+    local integration="${1}"
+    echo "Building package ${integration} failed"
+
+    # if running in Buildkite, add an annotation
+    if [ -n "$BUILDKITE_BRANCH" ]; then
+        buildkite-agent annotate "Building package ${integration} failed" --style "warning"
+    fi
+}
+
+build_packages() {
+    pushd packages > /dev/null
+
+    for it in $(find . -maxdepth 1 -mindepth 1 -type d); do
+        integration=$(basename ${it})
+        echo "Package ${integration}: check"
+
+        pushd ${integration} > /dev/null
+
+        version=$(cat manifest.yml | yq .version)
+        name=$(cat manifest.yml | yq .name)
+
+        package_zip="${name}-${version}.zip"
+
+        if is_already_published ${package_zip} ; then
+            echo "Skipping. ${package_zip} already published"
+            popd > /dev/null
+            continue
+        fi
+
+        echo "Build integration as zip: ${integration}"
+        check_and_build_package || report_build_failure ${integration}
+        popd > /dev/null
+
+        unpublished="true"
+    done
+    popd > /dev/null
+}
+
+sign_packages() {
+    echo "Signing packages"
+    # TODO require signing: to be based on elastic-package
+}
+
+publish_packages() {
+    echo "Publishing packages"
+    # TODO require publishing: to be based on elastic-package
+}
+
+if skipPublishing ; then
+    echo "packageStoragePublish: not the main branch or a backport branch, nothing will be published"
+    exit 0
+fi
+
+echo "Checking gsutil command..."
+if ! command -v gsutil &> /dev/null ; then
+    echo "⚠️ gsutil is not installed"
+    exit 1
+fi
+
+add_bin_path
+with_go
+with_yq
+use_elastic_package
+
+unpublished="false"
+
+build_packages
+
+if [ "${unpublished}" == "false" ]; then
+    echo "All packages are in sync"
+    exit 0
+fi
+
+sign_packages
+publish_packages
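build_packages skips any package whose zip already exists in package storage, using the `is_already_published` helper from common.sh. The equivalent manual check, with a hypothetical package name substituted for a real `<name>-<version>.zip`:

```bash
# nginx-1.0.0.zip is a placeholder; use a real package zip name.
curl -s --head "https://package-storage.elastic.co/artifacts/packages/nginx-1.0.0.zip" \
    | grep -q "HTTP/2 200" && echo "already published" || echo "not published"
```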
diff --git a/.buildkite/scripts/test_integrations_with_serverless.sh b/.buildkite/scripts/test_integrations_with_serverless.sh
new file mode 100755
index 00000000000..aa700659411
--- /dev/null
+++ b/.buildkite/scripts/test_integrations_with_serverless.sh
@@ -0,0 +1,126 @@
+#!/bin/bash
+
+source .buildkite/scripts/common.sh
+
+set -euo pipefail
+
+# default values
+SERVERLESS=${SERVERLESS:-"false"}
+STACK_VERSION=${STACK_VERSION:-""}
+FORCE_CHECK_ALL=${FORCE_CHECK_ALL:-"false"}
+SKIP_PUBLISHING=${SKIP_PUBLISHING:-"false"}
+
+SKIPPED_PACKAGES_FILE_PATH="${WORKSPACE}/skipped_packages.txt"
+FAILED_PACKAGES_FILE_PATH="${WORKSPACE}/failed_packages.txt"
+
+if running_on_buildkite; then
+    # just get the value from the meta-data if it is running on Buildkite
+    if buildkite-agent meta-data exists SERVERLESS_PROJECT; then
+        SERVERLESS_PROJECT="$(buildkite-agent meta-data get SERVERLESS_PROJECT)"
+    fi
+fi
+
+SERVERLESS_PROJECT=${SERVERLESS_PROJECT:-"observability"}
+echo "Running packages on serverless project type: ${SERVERLESS_PROJECT}"
+if running_on_buildkite; then
+    buildkite-agent annotate "Serverless project: ${SERVERLESS_PROJECT}" --context "ctx-info-${SERVERLESS_PROJECT}" --style "info"
+fi
+
+if [ ! -d packages ]; then
+    echo "Missing packages folder"
+    if running_on_buildkite ; then
+        buildkite-agent annotate "Missing packages folder" --style "error"
+    fi
+    exit 1
+fi
+
+add_bin_path
+
+with_yq
+with_mage
+with_docker_compose
+with_kubernetes
+
+check_package_in_serverless() {
+    local package="$1"
+
+    echo "--- Package ${package}: check"
+    pushd ${package} > /dev/null
+
+    clean_safe_logs
+
+    if [[ ${SERVERLESS} == "true" ]] ; then
+        if [[ "${package}" == "fleet_server" ]]; then
+            echo "fleet_server not supported. Skipped"
+            echo "- [${package}] not supported" >> ${SKIPPED_PACKAGES_FILE_PATH}
+            popd > /dev/null
+            return
+        fi
+        if ! is_spec_3_0_0 ; then
+            echo "Not v3 spec version. Skipped"
+            echo "- [${package}] spec <3.0.0" >> ${SKIPPED_PACKAGES_FILE_PATH}
+            popd > /dev/null
+            return
+        fi
+    fi
+
+    if ! reason=$(is_pr_affected ${package}) ; then
+        echo "${reason}"
+        echo "- ${reason}" >> ${SKIPPED_PACKAGES_FILE_PATH}
+        popd > /dev/null
+        return
+    fi
+
+    use_kind=0
+    if kubernetes_service_deployer_used ; then
+        echo "Kubernetes service deployer is used. Creating a kind cluster"
+        use_kind=1
+        create_kind_cluster
+    fi
+
+    if ! run_tests_package ${package} ; then
+        echo "[${package}] run_tests_package failed"
+        echo "- ${package}" >> ${FAILED_PACKAGES_FILE_PATH}
+        any_package_failing=1
+    fi
+
+    # TODO: add benchmarks support (https://github.com/elastic/integrations/blob/befdc5cb752a08aaf5f79b0d9bdb68588ade9f27/.ci/Jenkinsfile#L180)
+    # ${ELASTIC_PACKAGE_BIN} benchmark pipeline -v --report-format json --report-output file
+
+    if [ ${use_kind} -eq 1 ]; then
+        delete_kind_cluster
+    fi
+
+    teardown_serverless_test_package ${package}
+
+    popd > /dev/null
+}
+
+use_elastic_package
+
+prepare_serverless_stack
+
+any_package_failing=0
+
+pushd packages > /dev/null
+for package in $(list_all_directories); do
+    check_package_in_serverless ${package}
+done
+popd > /dev/null
+
+if running_on_buildkite ; then
+    if [ -f ${SKIPPED_PACKAGES_FILE_PATH} ]; then
+        create_collapsed_annotation "Skipped packages in ${SERVERLESS_PROJECT}" ${SKIPPED_PACKAGES_FILE_PATH} "info" "ctx-skipped-packages-${SERVERLESS_PROJECT}"
+    fi
+
+    if [ -f ${FAILED_PACKAGES_FILE_PATH} ]; then
+        create_collapsed_annotation "Failed packages in ${SERVERLESS_PROJECT}" ${FAILED_PACKAGES_FILE_PATH} "error" "ctx-failed-packages-${SERVERLESS_PROJECT}"
+    fi
+fi
+
+if [ $any_package_failing -eq 1 ] ; then
+    echo "These packages have failed:"
+    cat ${FAILED_PACKAGES_FILE_PATH}
+    exit 1
+fi
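When the build is not started from the UI input step, `SERVERLESS_PROJECT` can also be provided through build meta-data, which the script reads before falling back to `observability`. A sketch of the meta-data path, assuming an earlier step in the same build sets the value:

```bash
# In an earlier step of the same build (illustrative):
buildkite-agent meta-data set "SERVERLESS_PROJECT" "security"

# test_integrations_with_serverless.sh then picks it up with:
buildkite-agent meta-data exists SERVERLESS_PROJECT \
    && SERVERLESS_PROJECT="$(buildkite-agent meta-data get SERVERLESS_PROJECT)"
```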
diff --git a/.ci/Jenkinsfile b/.ci/Jenkinsfile
index fa13f663739..f8996fbfbb6 100644
--- a/.ci/Jenkinsfile
+++ b/.ci/Jenkinsfile
@@ -143,7 +143,7 @@ pipeline {
                   archiveArtifacts(allowEmptyArchive: true, artifacts: "build/elastic-stack-dump/${it}/logs/*.log, build/elastic-stack-dump/${it}/logs/fleet-server-internal/**/*")
                   archiveArtifactsSafe("insecure-logs/${it}/elastic-agent-logs/", "build/elastic-stack-dump/${it}/logs/elastic-agent-internal/*.*")
                   archiveArtifactsSafe("insecure-logs/${it}/elastic-agent-logs/default/", "build/elastic-stack-dump/${it}/logs/elastic-agent-internal/default/*")
-                  archiveArtifactsSafe("insecure-logs/${it}/container-logs", "build/container-logs/*.log")
+                  archiveArtifactsSafe("insecure-logs/${it}/container-logs/", "build/container-logs/*.log")
                   sh(label: "Take down the Elastic stack", script: 'build/elastic-package stack down -v')
                   stashCoverageReport()
diff --git a/catalog-info.yaml b/catalog-info.yaml
index b0259d4829f..55118198fdc 100644
--- a/catalog-info.yaml
+++ b/catalog-info.yaml
@@ -63,3 +63,99 @@ spec:
         access_level: MANAGE_BUILD_AND_READ
       everyone:
         access_level: READ_ONLY
+
+---
+# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
+apiVersion: backstage.io/v1alpha1
+kind: Resource
+metadata:
+  name: pipeline-integrations-schedule-daily
+  description: 'Daily pipeline for the Integrations project'
+  links:
+    - title: Pipeline
+      url: https://buildkite.com/elastic/integrations-schedule-daily
+
+spec:
+  type: buildkite-pipeline
+  owner: group:ingest-fp
+  system: buildkite
+  implementation:
+    apiVersion: buildkite.elastic.dev/v1
+    kind: Pipeline
+    metadata:
+      name: integrations-schedule-daily
+      description: 'Daily pipeline for the Integrations project'
+    spec:
+      pipeline_file: ".buildkite/pipeline.schedule-daily.yml"
+      schedules:
+        main_daily:
+          branch: "main"
+          cronline: "@daily"
+          message: "Run the daily jobs"
+      provider_settings:
+        trigger_mode: none # don't trigger jobs from github activity
+        build_pull_request_forks: false
+        build_pull_requests: true
+        build_tags: false
+        filter_enabled: true
+        filter_condition: >-
+          build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null)
+      repository: elastic/integrations
+      cancel_intermediate_builds: true
+      cancel_intermediate_builds_branch_filter: '!main'
+      skip_intermediate_builds: true
+      skip_intermediate_builds_branch_filter: '!main'
+      env:
+        ELASTIC_SLACK_NOTIFICATIONS_ENABLED: 'true'
+        # SLACK_NOTIFICATIONS_CHANNEL: '#beats-build' as the current jenkins schedule-daily?
+        SLACK_NOTIFICATIONS_CHANNEL: '#ingest-test-notifications' # TODO: will be changed before GO-PROD
+        SLACK_NOTIFICATIONS_ALL_BRANCHES: 'true'
+        SLACK_NOTIFICATIONS_ON_SUCCESS: 'true'
+      teams:
+        ingest-fp:
+          access_level: MANAGE_BUILD_AND_READ
+        everyone:
+          access_level: READ_ONLY
+
+---
+# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
+apiVersion: backstage.io/v1alpha1
+kind: Resource
+metadata:
+  name: pipeline-integrations-serverless
+  description: 'Pipeline for the Integrations project in serverless'
+  links:
+    - title: Pipeline
+      url: https://buildkite.com/elastic/integrations-serverless
+
+spec:
+  type: buildkite-pipeline
+  owner: group:ingest-fp
+  system: buildkite
+  implementation:
+    apiVersion: buildkite.elastic.dev/v1
+    kind: Pipeline
+    metadata:
+      name: integrations-serverless
+      description: 'Pipeline for the Integrations project in serverless'
+    spec:
+      pipeline_file: ".buildkite/pipeline.serverless.yml"
+      provider_settings:
+        trigger_mode: none # don't trigger jobs from github activity
+        build_pull_request_forks: false
+        build_pull_requests: true
+        build_tags: false
+        filter_enabled: true
+        filter_condition: >-
+          build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null)
+      repository: elastic/integrations
+      cancel_intermediate_builds: true
+      cancel_intermediate_builds_branch_filter: '!main'
+      skip_intermediate_builds: true
+      skip_intermediate_builds_branch_filter: '!main'
+      teams:
+        ingest-fp:
+          access_level: MANAGE_BUILD_AND_READ
+        everyone:
+          access_level: READ_ONLY