(debug) Fastens CI builds
XComp committed Apr 2, 2024
1 parent f97b011 commit 224129c
Showing 4 changed files with 5 additions and 320 deletions.
25 changes: 3 additions & 22 deletions .github/workflows/docs.yml
@@ -21,7 +21,7 @@ on:
 
 jobs:
   extract-release-branches:
-    if: github.repository == 'apache/flink'
+    #if: github.repository == 'apache/flink'
     name: "Extracts release branch information"
     runs-on: "ubuntu-latest"
     outputs:
@@ -80,27 +80,8 @@ jobs:
       - uses: actions/checkout@v4
         with:
           ref: ${{ matrix.branch }}
       - name: "Build documentation"
-        run: |
-          docker run --rm --volume "$PWD:/root/flink" chesnay/flink-ci:java_8_11_17_21_maven_386 bash -c "cd /root/flink && ./.github/workflows/docs.sh"
-      - name: "Upload documentation for ${{ matrix.branch }}"
-        uses: burnett01/rsync-deployments@5.2
-        with:
-          switches: --archive --compress
-          path: docs/target/
-          remote_path: ${{ secrets.NIGHTLIES_RSYNC_PATH }}/flink/flink-docs-${{ env.flink_branch }}/
-          remote_host: ${{ secrets.NIGHTLIES_RSYNC_HOST }}
-          remote_port: ${{ secrets.NIGHTLIES_RSYNC_PORT }}
-          remote_user: ${{ secrets.NIGHTLIES_RSYNC_USER }}
-          remote_key: ${{ secrets.NIGHTLIES_RSYNC_KEY }}
+        run: echo ${{ matrix.branch }}
       - name: "Upload documentation for ${{ matrix.branch }} with alias ${{ matrix.alias }}"
         if: matrix.alias != ''
-        uses: burnett01/rsync-deployments@5.2
-        with:
-          switches: --archive --compress
-          path: docs/target/
-          remote_path: ${{ secrets.NIGHTLIES_RSYNC_PATH }}/flink/flink-docs-${{ env.flink_alias }}/
-          remote_host: ${{ secrets.NIGHTLIES_RSYNC_HOST }}
-          remote_port: ${{ secrets.NIGHTLIES_RSYNC_PORT }}
-          remote_user: ${{ secrets.NIGHTLIES_RSYNC_USER }}
-          remote_key: ${{ secrets.NIGHTLIES_RSYNC_KEY }}
+        run: echo ${{ matrix.branch }} with ${{ matrix.alias }}
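For reference, the pattern applied above in isolation: the expensive build and rsync upload steps are swapped for cheap echo placeholders, and the repository guard is commented out, so triggers, matrix expansion, and secrets wiring can be exercised quickly on a fork. A minimal, hypothetical sketch, not part of this commit:

```yaml
# Hypothetical debug workflow illustrating the stub-out pattern.
name: debug-docs
on: workflow_dispatch
jobs:
  build:
    # if: github.repository == 'apache/flink'  # guard disabled while testing on a fork
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: "Build documentation"
        run: echo "would build docs for ${{ github.ref_name }}"  # real build stubbed out
```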
3 changes: 2 additions & 1 deletion .github/workflows/nightly-trigger.yml
@@ -21,10 +21,11 @@ name: "Nightly trigger (beta)"
 on:
   schedule:
     - cron: '0 2 * * *'
+  workflow_dispatch:
 
 jobs:
   Trigger:
-    if: github.repository == 'apache/flink'
+    #if: github.repository == 'apache/flink'
     permissions:
       actions: write
     runs-on: ubuntu-latest
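The added workflow_dispatch trigger makes the nightly pipeline startable on demand rather than only via the 02:00 cron. A sketch using the GitHub CLI, assuming gh is installed and authenticated, with fork and branch names as placeholders:

```bash
# Kick off the trigger workflow manually and inspect the newest run.
gh workflow run nightly-trigger.yml --repo <fork-owner>/flink --ref <debug-branch>
gh run list --repo <fork-owner>/flink --workflow=nightly-trigger.yml --limit 1
```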
4 changes: 0 additions & 4 deletions .github/workflows/nightly.yml
@@ -24,10 +24,6 @@ on:
 permissions: read-all
 
 jobs:
-  pre-compile-checks:
-    name: "Pre-compile Checks"
-    uses: ./.github/workflows/template.pre-compile-checks.yml
-
   nightly:
     strategy:
       # each Flink CI run occupies 10 jobs in parallel
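For context, the removed pre-compile-checks job called a reusable workflow via a job-level uses:. The general shape of such a call, as a generic sketch rather than Flink's actual template interface:

```yaml
# A job that delegates to a reusable workflow file in the same repository.
jobs:
  checks:
    uses: ./.github/workflows/template.pre-compile-checks.yml
    # `with:` / `secrets:` entries would follow here if the template declared inputs
```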
293 changes: 0 additions & 293 deletions .github/workflows/template.flink-ci.yml
@@ -105,296 +105,3 @@ jobs:
# - removes leading and following dashes
stringified_workflow_name=$(echo "${{ github.workflow }}-${{ inputs.workflow-caller-id }}" | tr -C '[:alnum:]._' '-' | tr '[:upper:]' '[:lower:]' | sed -e 's/--*/-/g' -e 's/^-*//g' -e 's/-*$//g')
echo "stringified-workflow-name=${stringified_workflow_name}" >> $GITHUB_OUTPUT
- name: "Compile Flink"
uses: "./.github/actions/run_mvn"
timeout-minutes: ${{ fromJSON(env.GHA_JOB_TIMEOUT) }}
with:
working_directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
maven-parameters: "test-compile -Dflink.markBundledAsOptional=false -Dfast"
env: "${{ inputs.environment }}"

- name: "Collect build artifacts"
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
run: |
./tools/azure-pipelines/create_build_artifact.sh

- name: "Upload artifacts to make them available in downstream jobs"
uses: actions/upload-artifact@v4
with:
name: build-artifacts-${{ steps.workflow-prep-step.outputs.stringified-workflow-name }}-${{ github.run_number }}
path: ${{ env.FLINK_ARTIFACT_DIR }}/${{ env.FLINK_ARTIFACT_FILENAME }}
if-no-files-found: error
# use minimum here because we only need these artifacts to speed up the build
retention-days: 1

packaging:
name: "Test packaging/licensing"
needs: compile
runs-on: ubuntu-22.04
container:
image: mapohl/flink-ci:FLINK-34194
# --init makes the process in the container be started as an init process, which will clean up any daemon processes during shutdown
# --privileged allows writing coredumps in docker (FLINK-16973)
options: --init --privileged

steps:
- name: "Flink Checkout"
uses: actions/checkout@v4
with:
persist-credentials: false

- name: "Initialize job"
uses: "./.github/actions/job_init"
with:
jdk_version: ${{ inputs.jdk_version }}
maven_repo_folder: ${{ env.MAVEN_REPO_FOLDER }}
source_directory: ${{ env.MOUNTED_WORKING_DIR }}
target_directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}

- name: "Download build artifacts from compile job"
uses: actions/download-artifact@v4
with:
name: build-artifacts-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}
path: ${{ env.FLINK_ARTIFACT_DIR }}

- name: "Unpack build artifact"
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
run: |
./tools/azure-pipelines/unpack_build_artifact.sh

- name: "Test"
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
run: |
${{ inputs.environment }} ./tools/ci/compile_ci.sh || exit $?

test:
name: "Test (module: ${{ matrix.module }})"
needs: compile
runs-on: ubuntu-22.04
container:
image: mapohl/flink-ci:FLINK-34194
# --init makes the process in the container be started as an init process, which will clean up any daemon processes during shutdown
# --privileged allows writing coredumps in docker (FLINK-16973)
options: --init --privileged
# the /mnt folder is a separate disk mounted to the host filesystem with more free disk space that can be utilized
volumes:
- /mnt:/root
env:
# timeout in minutes - this environment variable is required by uploading_watchdog.sh
GHA_JOB_TIMEOUT: 240
strategy:
fail-fast: false
matrix:
include:
- module: core
stringified-module-name: core
- module: python
stringified-module-name: python
- module: table
stringified-module-name: table
- module: connect
stringified-module-name: connect
- module: tests
stringified-module-name: tests
- module: misc
stringified-module-name: misc

steps:
- name: "Flink Checkout"
uses: actions/checkout@v4
with:
persist-credentials: false

- name: "Initialize job"
uses: "./.github/actions/job_init"
with:
jdk_version: ${{ inputs.jdk_version }}
maven_repo_folder: ${{ env.MAVEN_REPO_FOLDER }}
source_directory: ${{ env.MOUNTED_WORKING_DIR }}
target_directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}

- name: "Set coredump pattern"
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
run: sudo sysctl -w kernel.core_pattern=core.%p

- name: "Download build artifacts from compile job"
uses: actions/download-artifact@v4
with:
name: build-artifacts-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}
path: ${{ env.FLINK_ARTIFACT_DIR }}

- name: "Unpack build artifact"
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
run: |
./tools/azure-pipelines/unpack_build_artifact.sh

- name: "Try loading Docker images from Cache"
id: docker-cache
uses: actions/cache@v4
with:
path: ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}
key: ${{ matrix.module }}-docker-${{ runner.os }}-${{ hashFiles('**/cache_docker_images.sh', '**/flink-test-utils-parent/**/DockerImageVersions.java') }}
restore-keys: ${{ matrix.module }}-docker-${{ runner.os }}

- name: "Load Docker images if not present in cache, yet"
if: ${{ !cancelled() && !steps.docker-cache.outputs.cache-hit }}
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
run: ./tools/azure-pipelines/cache_docker_images.sh load

- name: "Test - ${{ matrix.module }}"
id: test-run
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
env:
IT_CASE_S3_BUCKET: ${{ secrets.s3_bucket }}
IT_CASE_S3_ACCESS_KEY: ${{ secrets.s3_access_key }}
IT_CASE_S3_SECRET_KEY: ${{ secrets.s3_secret_key }}
timeout-minutes: ${{ fromJSON(env.GHA_JOB_TIMEOUT) }}
run: |
${{ inputs.environment }} PROFILE="$PROFILE -Pgithub-actions" ./tools/azure-pipelines/uploading_watchdog.sh \
./tools/ci/test_controller.sh ${{ matrix.module }}

- name: "Post-build Disk Info"
if: ${{ always() }}
shell: bash
run: df -h

- name: "Top 15 biggest directories in terms of used disk space"
if: ${{ always() }}
shell: bash
run: |
du -ah --exclude="proc" -t100M . | sort -h -r | head -n 15

- name: "Post-process build artifacts"
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
run: find ${{ steps.test-run.outputs.debug-files-output-dir }} -type f -exec rename 's/[:<>|*?]/-/' {} \;

- name: "Upload build artifacts"
uses: actions/upload-artifact@v4
if: ${{ failure() && steps.test-run.outputs.debug-files-output-dir != '' }}
with:
name: logs-test-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}-${{ matrix.stringified-module-name }}-${{ steps.test-run.outputs.debug-files-name }}
path: ${{ steps.test-run.outputs.debug-files-output-dir }}

- name: "Save Docker images to cache"
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
if: ${{ !cancelled() && (failure() || !steps.docker-cache.outputs.cache-hit) }}
run: ./tools/azure-pipelines/cache_docker_images.sh save

e2e:
name: "E2E (group ${{ matrix.group }})"
needs: compile
# the end-to-end tests are not executed in Flink's CI Docker container due to problems when running Docker-in-Docker
runs-on: ubuntu-22.04
timeout-minutes: 310
env:
# timeout in minutes - this environment variable is required by uploading_watchdog.sh
GHA_JOB_TIMEOUT: 310

# required and forwarded as the cache-dir system property to DownloadCacheFactory
# implementations in flink-end-to-end-tests/flink-end-to-end-tests-common
E2E_CACHE_FOLDER: ${{ github.workspace }}/.e2e-cache
# required by common_artifact_download_cacher.sh in flink-end-to-end-tests/test-scripts
E2E_TARBALL_CACHE: ${{ github.workspace }}/.e2e-tar-cache

# The following environment variables need to be overwritten here because the e2e tests do not
# run in containers.
MAVEN_REPO_FOLDER: ${{ github.workspace }}/.m2/repository
MAVEN_ARGS: -Dmaven.repo.local=${{ github.workspace }}/.m2/repository
FLINK_ARTIFACT_DIR: ${{ github.workspace }}
DOCKER_IMAGES_CACHE_FOLDER: ${{ github.workspace }}/.docker-cache
strategy:
fail-fast: false
matrix:
group: [1, 2]

steps:
- name: "Flink Checkout"
uses: actions/checkout@v4
with:
persist-credentials: false

- name: "Initialize job"
uses: "./.github/actions/job_init"
with:
jdk_version: ${{ inputs.jdk_version }}
maven_repo_folder: ${{ env.MAVEN_REPO_FOLDER }}

- name: "Install missing packages"
run: sudo apt-get install -y net-tools docker-compose zip

# netty-tcnative requires OpenSSL v1.0.0
- name: "Install OpenSSL"
run: |
sudo apt-get install -y bc libapr1
wget -r --no-parent -nd --accept=libssl1.0.0_*ubuntu5.*_amd64.deb http://security.ubuntu.com/ubuntu/pool/main/o/openssl1.0/
sudo apt install ./libssl1.0.0_*.deb

- name: "Download build artifacts from compile job"
uses: actions/download-artifact@v4
with:
name: build-artifacts-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}
path: ${{ env.FLINK_ARTIFACT_DIR }}

- name: "Unpack build artifact"
run: ./tools/azure-pipelines/unpack_build_artifact.sh

# the cache task does not create directories on a cache miss, and can later fail when trying to tar the directory if the tests haven't created it
# this may for example happen if a given directory is only used by a subset of tests, which are run in a different 'group'
- name: "Create cache directories"
run: |
mkdir -p ${{ env.E2E_CACHE_FOLDER }}
mkdir -p ${{ env.E2E_TARBALL_CACHE }}
mkdir -p ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}

- name: "Load E2E files from Cache"
uses: actions/cache@v4
with:
path: ${{ env.E2E_CACHE_FOLDER }}
key: e2e-cache-${{ matrix.group }}-${{ hashFiles('**/flink-end-to-end-tests/**/*.java', '!**/avro/**') }}

- name: "Load E2E artifacts from Cache"
uses: actions/cache@v4
with:
path: ${{ env.E2E_TARBALL_CACHE }}
key: e2e-artifact-cache-${{ matrix.group }}-${{ hashFiles('**/flink-end-to-end-tests/**/*.sh') }}
restore-keys: e2e-artifact-cache-${{ matrix.group }}

- name: "Try loading Docker images from Cache"
id: docker-cache
uses: actions/cache@v4
with:
path: ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}
key: e2e-${{ matrix.group }}-docker-${{ runner.os }}-${{ hashFiles('**/cache_docker_images.sh', '**/flink-test-utils-parent/**/DockerImageVersions.java') }}

- name: "Load Docker images if not present in Cache, yet"
if: ${{ !cancelled() && !steps.docker-cache.outputs.cache-hit }}
run: ./tools/azure-pipelines/cache_docker_images.sh load

- name: "Build Flink"
uses: "./.github/actions/run_mvn"
with:
maven-parameters: "install -DskipTests -Dfast $PROFILE -Pskip-webui-build"
env: "${{ inputs.environment }}"

- name: "Run E2E Tests"
id: test-run
env:
IT_CASE_S3_BUCKET: ${{ secrets.s3_bucket }}
IT_CASE_S3_ACCESS_KEY: ${{ secrets.s3_access_key }}
IT_CASE_S3_SECRET_KEY: ${{ secrets.s3_secret_key }}
timeout-minutes: ${{ fromJSON(env.GHA_JOB_TIMEOUT) }}
run: |
${{ inputs.environment }} FLINK_DIR=`pwd`/build-target PROFILE="$PROFILE -Pgithub-actions" ./tools/azure-pipelines/uploading_watchdog.sh \
flink-end-to-end-tests/run-nightly-tests.sh ${{ matrix.group }}

- name: "Upload Logs"
uses: actions/upload-artifact@v4
if: ${{ failure() && steps.test-run.outputs.debug-files-output-dir != '' }}
with:
name: logs-e2e-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}-${{ matrix.group }}-${{ steps.test-run.outputs.debug-files-name }}
path: ${{ steps.test-run.outputs.debug-files-output-dir }}

- name: "Save Docker images to Cache"
if: ${{ !cancelled() && (failure() || !steps.docker-cache.outputs.cache-hit) }}
run: ./tools/azure-pipelines/cache_docker_images.sh save
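The stringified_workflow_name pipeline kept as context at the top of this file's hunk is easiest to follow with a concrete value. A sketch of the same commands with an assumed sample input:

```bash
# Walk "Nightly (beta)-ci" through the pipeline stage by stage:
echo "Nightly (beta)-ci" |
  tr -C '[:alnum:]._' '-' |        # disallowed chars (incl. the newline) become '-': "Nightly--beta--ci-"
  tr '[:upper:]' '[:lower:]' |     # lowercase: "nightly--beta--ci-"
  sed -e 's/--*/-/g' -e 's/^-*//g' -e 's/-*$//g'   # squeeze and trim dashes: "nightly-beta-ci"
```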

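Related: the Docker image steps in the deleted template gate on the cache-hit output of actions/cache. A minimal sketch of that gating pattern, with an illustrative path and key:

```yaml
- name: "Restore Docker images"
  id: docker-cache
  uses: actions/cache@v4
  with:
    path: ~/.docker-cache   # illustrative path
    key: docker-${{ runner.os }}-${{ hashFiles('**/DockerImageVersions.java') }}
- name: "Load images on a cache miss"
  # cache-hit is 'true' only on an exact key match and empty on a miss,
  # so the negation runs the step when nothing was restored.
  if: ${{ !cancelled() && !steps.docker-cache.outputs.cache-hit }}
  run: ./tools/azure-pipelines/cache_docker_images.sh load
```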