diff --git a/.github/workflows/complete-artifact-matrix-unsupported.yml b/.github/workflows/complete-artifact-matrix-unsupported.yml
new file mode 100644
index 0000000..cf0a422
--- /dev/null
+++ b/.github/workflows/complete-artifact-matrix-unsupported.yml
@@ -0,0 +1,5285 @@
+
+# template file: 050.single_header.yaml
+
+name: "Build Community Maintained Images (admin)"
+
+on:
+
+ workflow_call:
+ inputs:
+ ref: # commit id
+ required: false
+ type: string
+      extraParamsAllBuilds: # additional build parameter
+ required: false
+ type: string
+ secrets:
+ ORG_MEMBERS:
+ required: true
+ workflow_dispatch:
+ inputs:
+ skipImages:
+ description: 'Skip building images? no = build images, yes = skip images'
+ required: true
+ options: [ 'yes', 'no' ]
+ type: choice
+ default: 'no'
+ checkOci:
+ description: 'Check OCI for existing artifacts? yes = check OCI, no = always build everything'
+ required: true
+ options: [ 'yes', 'no' ]
+ type: choice
+ default: 'yes'
+ extraParamsAllBuilds:
+ description: 'Extra params for all builds/jobs (prepare/artifact/image) (eg: DEBUG=yes)'
+ required: false
+ default: ''
+ type: string
+ branch:
+ type: choice
+ description: 'Framework build branch'
+ options:
+ # branches
+ - main
+ - v23.11
+ default: 'main'
+ board:
+ type: choice
+ description: 'Board'
+ options:
+ # boards
+ - armsom-w3
+ - bananapi
+ - bananapicm4io
+ - bananapim1plus
+ - bananapim2
+ - bananapim2plus
+ - bananapim2pro
+ - bananapim2s
+ - bananapim2ultra
+ - bananapim2zero
+ - bananapim3
+ - bananapim5
+ - bananapim64
+ - bananapipro
+ - bananapir2
+ - bananapir2pro
+ - beaglev
+ - bigtreetech-cb1
+ - clearfogbase
+ - clearfogpro
+ - clockworkpi-a06
+ - cubieboard
+ - cubieboard2
+ - cubieboard4
+ - cubietruck
+ - cubietruckplus
+ - cubox-i
+ - espressobin
+ - fe-som-rk3399
+ - firefly-rk3399
+ - fxblox-rk1
+ - helios4
+ - helios64
+ - hikey960
+ - hinlink-h28k
+ - hinlink-h88k
+ - imx7sabre
+ - indiedroid-nova
+ - inovato-quadra
+ - jethubj100
+ - jethubj80
+ - jetson-nano
+ - khadas-edge
+ - khadas-edge2
+ - khadas-vim1
+ - khadas-vim1s
+ - khadas-vim2
+ - khadas-vim3
+ - khadas-vim3l
+ - khadas-vim4
+ - lafrite
+ - lamobo-r1
+ - lepotato
+ - licheepi-4a
+ - lime-a10
+ - lime-a33
+ - lime-a64
+ - lime
+ - lime2
+ - macchiatobin-doubleshot
+ - mangopi-mq
+ - mekotronics-r58-minipc
+ - mekotronics-r58x-4g
+ - mekotronics-r58x-pro
+ - mekotronics-r58x
+ - melea1000
+ - micro
+ - microzed
+ - miqi
+ - mixtile-blade3
+ - mk808c
+ - nanopct3
+ - nanopct3plus
+ - nanopct4
+ - nanopct6
+ - nanopi-r1
+ - nanopi-r1s-h5
+ - nanopi-r2c
+ - nanopi-r2s
+ - nanopi-r4s
+ - nanopi-r4se
+ - nanopi-r5s
+ - nanopi-r6s
+ - nanopia64
+ - nanopiair
+ - nanopiduo
+ - nanopiduo2
+ - nanopifire3
+ - nanopik1plus
+ - nanopik2-s905
+ - nanopim1
+ - nanopim1plus
+ - nanopim1plus2
+ - nanopim3
+ - nanopim4
+ - nanopim4v2
+ - nanopineo
+ - nanopineo2
+ - nanopineo2black
+ - nanopineo3
+ - nanopineo4
+ - nanopineocore2
+ - nanopineoplus2
+ - nezha
+ - odroidc1
+ - odroidc2
+ - odroidc4
+ - odroidhc4
+ - odroidm1
+ - odroidn2
+ - odroidn2l
+ - odroidxu4
+ - olimex-som-a20
+ - olimex-som204-a20
+ - olimex-teres-a64
+ - olinux-som-a13
+ - onecloud
+ - orangepi-r1
+ - orangepi-r1plus-lts
+ - orangepi-r1plus
+ - orangepi-rk3399
+ - orangepi
+ - orangepi2
+ - orangepi3-lts
+ - orangepi3
+ - orangepi3b
+ - orangepi4-lts
+ - orangepi4
+ - orangepi5-plus
+ - orangepi5
+ - orangepilite
+ - orangepilite2
+ - orangepimini
+ - orangepione
+ - orangepioneplus
+ - orangepipc
+ - orangepipc2
+ - orangepipcplus
+ - orangepiplus
+ - orangepiplus2e
+ - orangepiprime
+ - orangepiwin
+ - orangepizero
+ - orangepizero2
+ - orangepizeroplus
+ - orangepizeroplus2-h3
+ - orangepizeroplus2-h5
+ - panther-x2
+ - pcduino2
+ - pcduino3
+ - pcduino3nano
+ - pine64
+ - pine64so
+ - pinebook-a64
+ - pinebook-pro
+ - pinecube
+ - pineh64-b
+ - pineh64
+ - qemu-uboot-arm64
+ - qemu-uboot-x86
+ - quartz64a
+ - quartz64b
+ - radxa-e25
+ - radxa-zero
+ - radxa-zero2
+ - recore
+ - renegade
+ - roc-rk3399-pc
+ - rock-3a
+ - rock-4se
+ - rock-5-cmio
+ - rock-5a
+ - rock-5b
+ - rock64
+ - rockpi-4a
+ - rockpi-4b
+ - rockpi-4bplus
+ - rockpi-4c
+ - rockpi-4cplus
+ - rockpi-e
+ - rockpi-n10
+ - rockpi-s
+ - rockpro64
+ - rpi4b
+ - sk-am62b
+ - sk-am64b
+ - sk-tda4vm
+ - star64
+ - station-m1
+ - station-m2
+ - station-m3
+ - station-p1
+ - station-p2
+ - thinkpad-x13s
+ - tinkerboard-2
+ - tinkerboard
+ - tritium-h3
+ - tritium-h5
+ - udoo
+ - uefi-arm64
+ - uefi-riscv64
+ - uefi-x86
+ - unleashed
+ - unmatched
+ - virtual-qemu
+ - visionfive
+ - visionfive2
+ - wsl2-arm64
+ - wsl2-x86
+ - xiaomi-elish
+ - zeropi
+ - all
+ default: 'all'
+ maintainer:
+ type: choice
+ description: 'Maintainer'
+ options:
+ # maintainers
+ - 150balbes
+ - 1ubuntuuser
+ - AGM1968
+ - AaronNGray
+ - DylanHP
+ - Kreyren
+ - ManoftheSea
+ - Manouchehri
+ - NicoD-SBC
+ - PanderMusubi
+ - StephenGraf
+ - TRSx80
+ - Tonymac32
+ - ZazaBR
+ - adeepn
+ - ahoneybun
+ - amazingfate
+ - bigtreetech
+ - brentr
+ - bretmlw
+ - catalinii
+ - chainsx
+ - clee
+ - devdotnetorg
+ - echatzip
+ - efectn
+ - eliasbakken
+ - engineer-80
+ - glneo
+ - hzyitc
+ - igorpecovnik
+ - jeanrhum
+ - joekhoobyar
+ - joshaspinall
+ - krachlatte
+ - lanefu
+ - lbmendes
+ - linhz0hz
+ - mahdichi
+ - monkaBlyat
+ - paolosabatino
+ - pyavitz
+ - rpardini
+ - schwar3kat
+ - sgjava
+ - sputnik2019
+ - teknoid
+ - utlark
+ - vamzii
+ - viraniac
+ - all
+ default: 'all'
+ targetsFilterInclude:
+ description: 'TARGETS_FILTER_INCLUDE, example: "BOARD:odroidhc4,BOARD:odroidn2"'
+ required: false
+ default: ''
+ type: string
+ nightlybuild:
+        description: 'yes = nightly, no = stable'
+ required: false
+ options: [ 'yes', 'no' ]
+ type: choice
+ default: 'no'
+ uploadtoserver:
+        description: 'CDN upload github = releases, armbian = rsync.armbian.com, both = both'
+ required: false
+ options: [ 'github', 'armbian', 'both' ]
+ type: choice
+ default: 'armbian'
+ versionOverride:
+ description: 'Version override. Leave empty for automatic bump'
+ required: false
+ default: ''
+
+env:
+
+ # For easier reuse across the multiple chunks ('armbian/build' repo)
+ BUILD_REPOSITORY: "armbian/build"
+ BUILD_REF: "${{ inputs.ref || inputs.branch || 'main' }}" # branch or tag or sha1
+
+ # For easier reuse across the multiple chunks ('armbian/os' repo)
+ USERPATCHES_REPOSITORY: "armbian/os"
+ USERPATCHES_REF: "main" # branch or tag or sha1
+ USERPATCHES_DIR: "userpatches" # folder inside USERPATCHES_REPOSITORY
+
+ # Armbian envs. Adjust to your needs.
+ # This makes builds faster, but only if the Docker images are up-to-date with all dependencies, Python, tools, etc. Otherwise it makes it... slower.
+ DOCKER_SKIP_UPDATE: "yes" # Do not apt update/install/requirements/etc during Dockerfile build, trust that Docker images are up-to-date.
+
+ # Added to every build, even the prepare job.
+ EXTRA_PARAMS_ALL_BUILDS: "${{ inputs.extraParamsAllBuilds || 'UPLOAD_TO_OCI_ONLY=yes' }}"
+
+ VERSION_OVERRIDE: "${{ github.event.inputs.versionOverride }}"
+
+ # To use GitHub CLI in a GitHub Actions workflow
+ GH_TOKEN: "${{ secrets.ACCESS_TOKEN }}"
+
+ # Added to every image build arguments.
+ EXTRA_PARAMS_IMAGE: "COMPRESS_OUTPUTIMAGE=xz,sha SHOW_DEBIAN=yes SHARE_LOG=yes "
+
+# To ensure that only a single workflow using the same concurrency group will run at a time
+concurrency:
+ group: ${{ github.run_id }}-unsupportedstable
+ cancel-in-progress: false
+
+jobs:
+
+ # additional security check
+ team_check:
+ permissions:
+ actions: write
+ name: "Team check"
+ runs-on: [ "ubuntu-latest" ]
+ steps:
+
+ - name: "Check membership"
+ uses: armbian/actions/team-check@main
+ with:
+ ORG_MEMBERS: ${{ secrets.ORG_MEMBERS }}
+ GITHUB_TOKEN: "${{ env.GH_TOKEN }}"
+ TEAM: "Release manager"
+
+ version_prep:
+ needs: team_check
+ name: "Bump version"
+ runs-on: [ "ubuntu-latest" ]
+ steps:
+
+ # Cleaning self hosted runners
+ #- name: Runner clean
+ # uses: armbian/actions/runner-clean@main
+
+ # Clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: 0
+ clean: false # true is default.
+
+ - name: Determine version
+ id: versionfile
+ run: |
+ # file = where version is getting stored, different for stable and nightly
+          # skip_tag = we only upload nightly to GH releases
+ echo "file=nightly" >> $GITHUB_OUTPUT
+ echo "skip_tag=false" >> $GITHUB_OUTPUT
+ echo "pre_release=true" >> $GITHUB_OUTPUT
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ echo "file=stable" >> $GITHUB_OUTPUT
+ echo "skip_tag=true" >> $GITHUB_OUTPUT
+ echo "pre_release=false" >> $GITHUB_OUTPUT
+ fi
+
+ # Bump version automatically
+ - name: Bump version
+ # if: ${{ (github.event.inputs.skipImages || 'no') != 'yes' }}
+# if: ${{ (github.event.inputs.skipImages || 'yes') != 'yes' && ( env.VERSION_OVERRIDE != '' ) }}
+# if: ${{ ! github.event.inputs.versionOverride }}
+ if: ${{ ( ! github.event.inputs.versionOverride ) && ( inputs.ref == '' ) }}
+ id: changelog
+ uses: TriPSs/conventional-changelog-action@v4.1.1
+ with:
+ github-token: ${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}
+ git-message: 'Bump release to {version}'
+ git-user-name: armbianworker
+ git-user-email: info@armbian.com
+ output-file: 'false'
+ skip-version-file: 'false'
+ skip-on-empty: 'false'
+ skip-commit: 'false'
+ skip-ci: 'false'
+ skip-tag: "${{ steps.versionfile.outputs.skip_tag }}"
+ version-file: "${{ steps.versionfile.outputs.file }}.json"
+ pre-release: "${{ steps.versionfile.outputs.pre_release }}"
+ git-branch: 'main'
+ tag-prefix: ''
+ pre-release-identifier: 'trunk'
+
+      - name: Read version from file if not overridden
+ if: ${{ ! github.event.inputs.versionOverride }}
+ run: |
+ mkdir -p downloads
+ cat "${{ steps.versionfile.outputs.file }}.json" | jq '.version' | sed "s/\"//g" | sed 's/^/VERSION_OVERRIDE=/' >> $GITHUB_ENV
+ cat "${{ steps.versionfile.outputs.file }}.json" | jq '.version' | sed "s/\"//g" > downloads/version
+
+ - name: 'Upload Artifact'
+ uses: actions/upload-artifact@v3
+ with:
+ name: assets-for-download-stable
+ path: downloads
+ retention-days: 5
+
+ - name: "Generate body file"
+ if: ${{ (github.event.inputs.skipImages || 'no') != 'yes' }}
+ run: |
+ echo "
+
+
+
+ Rolling releases
+
+
+
+
+
+
+ - rolling releases are available at the bottom of official download pages
+ - if you want to change automated builds variants, edit .yaml files
+ - for old builds with unknown support status check archives
+
+
+
+ " > body.html
+
+ - uses: ncipollo/release-action@v1
+ if: ${{ (github.event.inputs.nightlybuild || 'no') == 'yes' && (github.event.inputs.skipImages || 'no') != 'yes' }}
+ with:
+ tag: "${{ env.VERSION_OVERRIDE }}"
+ name: "${{ env.VERSION_OVERRIDE }}"
+ bodyFile: "body.html"
+ prerelease: "true"
+ allowUpdates: true
+ removeArtifacts: true
+ token: ${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}
+
+ - name: Save
+ id: releases
+ run: |
+
+ echo "version=${{ env.VERSION_OVERRIDE }}" >> $GITHUB_OUTPUT
+
+ outputs:
+
+ # not related to matrix
+ version: ${{ steps.releases.outputs.version }}
+
+ matrix_prep:
+ name: "JSON matrix: 17/16 :: 17 artifact chunks, 16 image chunks"
+ if: ${{ github.repository_owner == 'armbian' }}
+ needs: [ version_prep ]
+ runs-on: [ "self-hosted", "Linux", 'alfa' ]
+ steps:
+
+ # Cleaning self hosted runners
+ - name: Runner clean
+ uses: armbian/actions/runner-clean@main
+
+ # clone the build system repo (`armbian/build`)
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ env.BUILD_REF }}
+ fetch-depth: 0
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+ path: build
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: 0
+ clean: false # true is default.
+ path: userpatches
+
+ - name: "grab the sha1 of the latest commit of the build repo ${{ env.BUILD_REPOSITORY }}#${{ env.BUILD_REF }}"
+ id: latest-commit
+ run: |
+ cd build
+ echo "sha1=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
+ cd ..
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+
+ mkdir -pv build/userpatches
+ rsync -av userpatches/${{env.USERPATCHES_DIR}}/. build/userpatches/
+
+      - name: GitHub cache
+        id: cache-restore
+        uses: actions/cache@v3
+        with:
+          path: |
+            cache/memoize
+            cache/oci/positive
+          key: ${{ runner.os }}-matrix-cache-${{ github.sha }}-${{ steps.latest-commit.outputs.sha1 }}
+          restore-keys: |
+            ${{ runner.os }}-matrix-cache-
+
+ # Login to ghcr.io, we're gonna do a lot of OCI lookups.
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access.
+
+      - name: Prepare Info JSON and Matrices
+        id: prepare-matrix
+        run: |
+
+          FILTERS="${{ github.event.inputs.targetsFilterInclude }}"
+
+          if [ -z "${FILTERS}" ] && [ "${{ github.event.inputs.board }}" != "all" ] && [ -n "${{ github.event.inputs.board }}" ]; then
+            FILTERS='"BOARD:${{ github.event.inputs.board }}"'
+          fi
+
+          if [ -z "${FILTERS}" ] && [ "${{ github.event.inputs.maintainer }}" != "all" ] && [ -n "${{ github.event.inputs.maintainer }}" ]; then
+            FILTERS='"BOARD_MAINTAINERS:${{ github.event.inputs.maintainer }}"'
+          fi
+
+          # this sets outputs "artifact-matrix" #and "image-matrix"
+          cd build
+          bash ./compile.sh gha-matrix armbian-images \
+            REVISION="${{ needs.version_prep.outputs.version }}" \
+            TARGETS_FILTER_INCLUDE="${FILTERS}" \
+            BETA=${{ github.event.inputs.nightlybuild || 'no' }} \
+            CLEAN_INFO=yes \
+            CLEAN_MATRIX=yes \
+            MATRIX_ARTIFACT_CHUNKS=17 \
+            MATRIX_IMAGE_CHUNKS=16 \
+            CHECK_OCI=${{ github.event.inputs.checkOci || 'no' }} \
+            TARGETS_FILENAME="targets-release-community-maintained.yaml" \
+            SKIP_IMAGES=${{ github.event.inputs.skipImages || 'no' }} \
+            ${{env.EXTRA_PARAMS_ALL_BUILDS}} SHARE_LOG=yes # IMAGES_ONLY_OUTDATED_ARTIFACTS=yes
+
+ - name: "Logs: ${{ steps.prepare-matrix.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.prepare-matrix.outputs.logs_url }}"
+
+ # Store output/info folder in a GitHub Actions artifact
+ - uses: actions/upload-artifact@v3
+ name: Upload output/info as GitHub Artifact
+ with:
+ name: build-info-json
+ path: build/output/info
+
+ - name: chown cache memoize/oci back to normal user
+ run: sudo chown -R $USER:$USER build/cache/memoize build/cache/oci/positive
+
+ outputs:
+
+ # not related to matrix
+ build-sha1: ${{ steps.latest-commit.outputs.sha1 }}
+ version: ${{ needs.version_prep.outputs.version }}
+
+# template file: 150.per-chunk-artifacts_prep-outputs.yaml
+
+ # artifacts-1 of 17
+ artifacts-chunk-json-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-1 }}
+ artifacts-chunk-not-empty-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-1 }}
+ artifacts-chunk-size-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-1 }}
+ # artifacts-2 of 17
+ artifacts-chunk-json-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-2 }}
+ artifacts-chunk-not-empty-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-2 }}
+ artifacts-chunk-size-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-2 }}
+ # artifacts-3 of 17
+ artifacts-chunk-json-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-3 }}
+ artifacts-chunk-not-empty-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-3 }}
+ artifacts-chunk-size-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-3 }}
+ # artifacts-4 of 17
+ artifacts-chunk-json-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-4 }}
+ artifacts-chunk-not-empty-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-4 }}
+ artifacts-chunk-size-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-4 }}
+ # artifacts-5 of 17
+ artifacts-chunk-json-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-5 }}
+ artifacts-chunk-not-empty-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-5 }}
+ artifacts-chunk-size-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-5 }}
+ # artifacts-6 of 17
+ artifacts-chunk-json-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-6 }}
+ artifacts-chunk-not-empty-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-6 }}
+ artifacts-chunk-size-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-6 }}
+ # artifacts-7 of 17
+ artifacts-chunk-json-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-7 }}
+ artifacts-chunk-not-empty-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-7 }}
+ artifacts-chunk-size-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-7 }}
+ # artifacts-8 of 17
+ artifacts-chunk-json-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-8 }}
+ artifacts-chunk-not-empty-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-8 }}
+ artifacts-chunk-size-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-8 }}
+ # artifacts-9 of 17
+ artifacts-chunk-json-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-9 }}
+ artifacts-chunk-not-empty-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-9 }}
+ artifacts-chunk-size-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-9 }}
+ # artifacts-10 of 17
+ artifacts-chunk-json-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-10 }}
+ artifacts-chunk-not-empty-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-10 }}
+ artifacts-chunk-size-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-10 }}
+ # artifacts-11 of 17
+ artifacts-chunk-json-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-11 }}
+ artifacts-chunk-not-empty-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-11 }}
+ artifacts-chunk-size-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-11 }}
+ # artifacts-12 of 17
+ artifacts-chunk-json-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-12 }}
+ artifacts-chunk-not-empty-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-12 }}
+ artifacts-chunk-size-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-12 }}
+ # artifacts-13 of 17
+ artifacts-chunk-json-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-13 }}
+ artifacts-chunk-not-empty-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-13 }}
+ artifacts-chunk-size-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-13 }}
+ # artifacts-14 of 17
+ artifacts-chunk-json-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-14 }}
+ artifacts-chunk-not-empty-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-14 }}
+ artifacts-chunk-size-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-14 }}
+ # artifacts-15 of 17
+ artifacts-chunk-json-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-15 }}
+ artifacts-chunk-not-empty-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-15 }}
+ artifacts-chunk-size-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-15 }}
+ # artifacts-16 of 17
+ artifacts-chunk-json-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-16 }}
+ artifacts-chunk-not-empty-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-16 }}
+ artifacts-chunk-size-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-16 }}
+ # artifacts-17 of 17
+ artifacts-chunk-json-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-17 }}
+ artifacts-chunk-not-empty-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-17 }}
+ artifacts-chunk-size-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-17 }}
+
+# template file: 151.per-chunk-images_prep-outputs.yaml
+
+    # images-1 of 16
+ images-chunk-json-1: ${{ steps.prepare-matrix.outputs.images-chunk-json-1 }}
+ images-chunk-not-empty-1: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-1 }}
+ images-chunk-size-1: ${{ steps.prepare-matrix.outputs.images-chunk-size-1 }}
+    # images-2 of 16
+ images-chunk-json-2: ${{ steps.prepare-matrix.outputs.images-chunk-json-2 }}
+ images-chunk-not-empty-2: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-2 }}
+ images-chunk-size-2: ${{ steps.prepare-matrix.outputs.images-chunk-size-2 }}
+    # images-3 of 16
+ images-chunk-json-3: ${{ steps.prepare-matrix.outputs.images-chunk-json-3 }}
+ images-chunk-not-empty-3: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-3 }}
+ images-chunk-size-3: ${{ steps.prepare-matrix.outputs.images-chunk-size-3 }}
+    # images-4 of 16
+ images-chunk-json-4: ${{ steps.prepare-matrix.outputs.images-chunk-json-4 }}
+ images-chunk-not-empty-4: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-4 }}
+ images-chunk-size-4: ${{ steps.prepare-matrix.outputs.images-chunk-size-4 }}
+    # images-5 of 16
+ images-chunk-json-5: ${{ steps.prepare-matrix.outputs.images-chunk-json-5 }}
+ images-chunk-not-empty-5: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-5 }}
+ images-chunk-size-5: ${{ steps.prepare-matrix.outputs.images-chunk-size-5 }}
+    # images-6 of 16
+ images-chunk-json-6: ${{ steps.prepare-matrix.outputs.images-chunk-json-6 }}
+ images-chunk-not-empty-6: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-6 }}
+ images-chunk-size-6: ${{ steps.prepare-matrix.outputs.images-chunk-size-6 }}
+    # images-7 of 16
+ images-chunk-json-7: ${{ steps.prepare-matrix.outputs.images-chunk-json-7 }}
+ images-chunk-not-empty-7: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-7 }}
+ images-chunk-size-7: ${{ steps.prepare-matrix.outputs.images-chunk-size-7 }}
+    # images-8 of 16
+ images-chunk-json-8: ${{ steps.prepare-matrix.outputs.images-chunk-json-8 }}
+ images-chunk-not-empty-8: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-8 }}
+ images-chunk-size-8: ${{ steps.prepare-matrix.outputs.images-chunk-size-8 }}
+    # images-9 of 16
+ images-chunk-json-9: ${{ steps.prepare-matrix.outputs.images-chunk-json-9 }}
+ images-chunk-not-empty-9: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-9 }}
+ images-chunk-size-9: ${{ steps.prepare-matrix.outputs.images-chunk-size-9 }}
+    # images-10 of 16
+ images-chunk-json-10: ${{ steps.prepare-matrix.outputs.images-chunk-json-10 }}
+ images-chunk-not-empty-10: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-10 }}
+ images-chunk-size-10: ${{ steps.prepare-matrix.outputs.images-chunk-size-10 }}
+    # images-11 of 16
+ images-chunk-json-11: ${{ steps.prepare-matrix.outputs.images-chunk-json-11 }}
+ images-chunk-not-empty-11: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-11 }}
+ images-chunk-size-11: ${{ steps.prepare-matrix.outputs.images-chunk-size-11 }}
+    # images-12 of 16
+ images-chunk-json-12: ${{ steps.prepare-matrix.outputs.images-chunk-json-12 }}
+ images-chunk-not-empty-12: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-12 }}
+ images-chunk-size-12: ${{ steps.prepare-matrix.outputs.images-chunk-size-12 }}
+    # images-13 of 16
+ images-chunk-json-13: ${{ steps.prepare-matrix.outputs.images-chunk-json-13 }}
+ images-chunk-not-empty-13: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-13 }}
+ images-chunk-size-13: ${{ steps.prepare-matrix.outputs.images-chunk-size-13 }}
+    # images-14 of 16
+ images-chunk-json-14: ${{ steps.prepare-matrix.outputs.images-chunk-json-14 }}
+ images-chunk-not-empty-14: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-14 }}
+ images-chunk-size-14: ${{ steps.prepare-matrix.outputs.images-chunk-size-14 }}
+    # images-15 of 16
+ images-chunk-json-15: ${{ steps.prepare-matrix.outputs.images-chunk-json-15 }}
+ images-chunk-not-empty-15: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-15 }}
+ images-chunk-size-15: ${{ steps.prepare-matrix.outputs.images-chunk-size-15 }}
+    # images-16 of 16
+ images-chunk-json-16: ${{ steps.prepare-matrix.outputs.images-chunk-json-16 }}
+ images-chunk-not-empty-16: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-16 }}
+ images-chunk-size-16: ${{ steps.prepare-matrix.outputs.images-chunk-size-16 }}
+
+# template file: 250.single_aggr-jobs.yaml
+
+ # ------ aggregate all artifact chunks into a single dependency -------
+
+ all-artifacts-ready:
+ name: "17 artifacts chunks ready"
+ runs-on: ubuntu-latest # not going to run, anyway, but is required.
+ if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
+ needs: [ "matrix_prep", "build-artifacts-chunk-1","build-artifacts-chunk-2","build-artifacts-chunk-3","build-artifacts-chunk-4","build-artifacts-chunk-5","build-artifacts-chunk-6","build-artifacts-chunk-7","build-artifacts-chunk-8","build-artifacts-chunk-9","build-artifacts-chunk-10","build-artifacts-chunk-11","build-artifacts-chunk-12","build-artifacts-chunk-13","build-artifacts-chunk-14","build-artifacts-chunk-15","build-artifacts-chunk-16","build-artifacts-chunk-17" ] # <-- HERE: all artifact chunk numbers.
+ steps:
+ - name: fake step
+ run: uptime
+
+ all-images-ready:
+ name: "16 image chunks ready"
+ runs-on: ubuntu-latest # not going to run, anyway, but is required.
+ if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
+ needs: [ "matrix_prep", "build-images-chunk-1","build-images-chunk-2","build-images-chunk-3","build-images-chunk-4","build-images-chunk-5","build-images-chunk-6","build-images-chunk-7","build-images-chunk-8","build-images-chunk-9","build-images-chunk-10","build-images-chunk-11","build-images-chunk-12","build-images-chunk-13","build-images-chunk-14","build-images-chunk-15","build-images-chunk-16" ] # <-- HERE: all image chunk numbers.
+ steps:
+ - name: fake step
+ run: uptime
+
+ all-artifacts-and-images-ready:
+ name: "17 artifacts and 16 image chunks ready"
+ runs-on: ubuntu-latest # not going to run, anyway, but is required.
+ if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
+ needs: [ "matrix_prep", "all-artifacts-ready", "all-images-ready" ]
+ steps:
+ - name: fake step
+ run: uptime
+
+# template file: 550.per-chunk-artifacts_job.yaml
+
+ "build-artifacts-chunk-1": # templated "build-artifacts-chunk-1"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-1 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-1) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A1' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 45
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-2": # templated "build-artifacts-chunk-2"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-2 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-2) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A2' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 45
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-3": # templated "build-artifacts-chunk-3"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-3 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-3) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A3' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-4": # templated "build-artifacts-chunk-4"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-4 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-4) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A4' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-5": # templated "build-artifacts-chunk-5"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-5 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-5) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A5' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-6": # templated "build-artifacts-chunk-6"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-6 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-6) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A6' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-7": # templated "build-artifacts-chunk-7"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-7 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-7) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A7' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-8": # templated "build-artifacts-chunk-8"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-8 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-8) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A8' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-9": # templated "build-artifacts-chunk-9"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-9 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-9) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A9' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-10": # templated "build-artifacts-chunk-10"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-10 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-10) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A10' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-11": # templated "build-artifacts-chunk-11"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-11 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-11) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A11' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-12": # templated "build-artifacts-chunk-12"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-12 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-12) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A12' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-13": # templated "build-artifacts-chunk-13"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-13 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-13) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A13' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  "build-artifacts-chunk-14": # templated "build-artifacts-chunk-14"
    # Builds one chunk of the artifact matrix produced by the `matrix_prep` job.
    # Skipped when this chunk is empty or when running outside the `armbian` org.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-14 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-14) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A14' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:

      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main

      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.

      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      # NOTE(review): USERPATCHES_REPOSITORY/USERPATCHES_REF/USERPATCHES_DIR and BUILD_REPOSITORY are
      # presumably set in workflow-level `env` (not visible in this chunk) — confirm before changing.
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo

      - name: Checkout build repo
        uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the host's /dev if mounted inside.

      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v4
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo

      # copy only the relevant subdirectory into ./userpatches, then drop the clone
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo

      # run the actual artifact build; `matrix.invocation` carries the per-artifact compile.sh arguments
      - name: Build ${{matrix.desc}}
        timeout-minutes: 45
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}

      # surface the shared-log URL even when the build step failed
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-15": # templated "build-artifacts-chunk-15"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-15 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-15) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A15' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub Actions built-in token; the repo must have package (ghcr.io) access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 45
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-16": # templated "build-artifacts-chunk-16"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-16 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-16) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A16' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub Actions built-in token; the repo must have package (ghcr.io) access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 45
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+ "build-artifacts-chunk-17": # templated "build-artifacts-chunk-17"
+ if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-17 == 'yes' }} # <-- HERE: Chunk number.
+ needs: [ "matrix_prep" ]
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-17) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty A17' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub Actions built-in token; the repo must have package (ghcr.io) access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Build ${{matrix.desc}}
+ timeout-minutes: 45
+ id: build
+ run: |
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build.outputs.logs_url }}"
+
+# template file: 650.per-chunk-images_job.yaml
+
+ "build-images-chunk-1": # templated "build-images-chunk-1"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-1 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-1) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I1' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self-hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate the fixed loop device from the runner name (self-hosted runners only; ImageOS is set on GitHub-hosted runners)
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower than 20%, sleeping 10m"
+ sleep 10m
+ done
+
+ # NOTE(review): unreachable — the while loop above only terminates via 'exit 0'; this was meant to show the current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nightly images are GitHub-only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$(( RANDOM % 2 ))
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self-hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-2": # templated "build-images-chunk-2"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-2 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-2) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I2' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self-hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate the fixed loop device from the runner name (self-hosted runners only; ImageOS is set on GitHub-hosted runners)
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower than 20%, sleeping 10m"
+ sleep 10m
+ done
+
+ # NOTE(review): unreachable — the while loop above only terminates via 'exit 0'; this was meant to show the current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nightly images are GitHub-only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$(( RANDOM % 2 ))
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self-hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-3": # templated "build-images-chunk-3"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-3 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-3) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I3' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self-hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the host's /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate the fixed loop device from the runner name (self-hosted runners only; ImageOS is set on GitHub-hosted runners)
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower than 20%, sleeping 10m"
+ sleep 10m
+ done
+
+ # NOTE(review): unreachable — the while loop above only terminates via 'exit 0'; this was meant to show the current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nightly images are GitHub-only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$(( RANDOM % 2 ))
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self-hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-4": # templated "build-images-chunk-4"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-4 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-4) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I4' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nighly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$[ $RANDOM % 2 ]
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-5": # templated "build-images-chunk-5"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-5 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-5) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I5' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nighly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$[ $RANDOM % 2 ]
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-6": # templated "build-images-chunk-6"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-6 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-6) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I6' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nighly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$[ $RANDOM % 2 ]
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-7": # templated "build-images-chunk-7"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-7 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-7) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I7' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nighly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$[ $RANDOM % 2 ]
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-8": # templated "build-images-chunk-8"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-8 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-8) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I8' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nighly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$[ $RANDOM % 2 ]
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-9": # templated "build-images-chunk-9"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-9 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-9) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I9' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nighly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$[ $RANDOM % 2 ]
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-10": # templated "build-images-chunk-10"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-10 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-10) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I10' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nighly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$[ $RANDOM % 2 ]
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-11": # templated "build-images-chunk-11"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-11 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-11) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I11' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nighly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$[ $RANDOM % 2 ]
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-12": # templated "build-images-chunk-12"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-12 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-12) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I12' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nighly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$[ $RANDOM % 2 ]
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-13": # templated "build-images-chunk-13"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-13 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-13) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I13' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nighly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$[ $RANDOM % 2 ]
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-14": # templated "build-images-chunk-14"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-14 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-14) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I14' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+ - name: Check API rate limits
+ run: |
+
+ # install dependencies
+ if ! command -v "gh" > /dev/null 2>&1; then
+ sudo apt-get -y -qq install gh
+ fi
+
+ while true
+ do
+ API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
+ API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
+ PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+ if (( $PERCENT > 20 )); then
+ echo "API rate in good shape $PERCENT % free"
+ exit 0
+ fi
+ echo "API rate lower then 20%, sleping 10m"
+ sleep 10m
+ done
+
+ # show current api rate
+ curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Sign
+ env:
+ GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+ if: env.GPG_PASSPHRASE1 != null
+ run: |
+
+ echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+ # nighly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+ - name: Choose random user for upload
+ run: |
+
+ arr[0]="${{ secrets.ACCESS_TOKEN }}"
+ arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+ rand=$[ $RANDOM % 2 ]
+ echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-15": # templated "build-images-chunk-15"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-15 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-15) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I15' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+      - name: Check API rate limits
+        run: |
+
+          # Install the GitHub CLI if the runner image does not provide it.
+          if ! command -v "gh" > /dev/null 2>&1; then
+            sudo apt-get -y -qq install gh
+          fi
+
+          # Show the current API rate up front (previously placed after the
+          # endless loop below, where it was unreachable dead code).
+          curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+          # Block this job until at least 20% of the API quota is free.
+          while true
+          do
+            # Query /rate_limit once and extract both fields from the same
+            # response, so limit/remaining cannot come from different samples.
+            RATE_JSON=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit)
+            API_CALLS_TOTAL=$(echo "${RATE_JSON}" | jq -r '.rate.limit')
+            API_CALLS_LEFT=$(echo "${RATE_JSON}" | jq -r '.rate.remaining')
+            PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+            if (( PERCENT > 20 )); then
+              echo "API rate in good shape $PERCENT % free"
+              exit 0
+            fi
+            echo "API rate lower than 20%, sleeping 10m"
+            sleep 10m
+          done
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+      - name: Sign
+        env:
+          GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+        if: env.GPG_PASSPHRASE1 != null
+        run: |
+
+          # Read the passphrase from the step env (already set above) and quote
+          # it: inline ${{ }} expansion into an unquoted echo would word-split
+          # or glob-expand a passphrase containing spaces/special characters.
+          echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+          # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+      - name: Choose random user for upload
+        run: |
+
+          # Spread uploads across two tokens to dodge per-user API rate limits.
+          arr[0]="${{ secrets.ACCESS_TOKEN }}"
+          arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+          # $[ ... ] arithmetic is deprecated bash syntax; use $(( ... )).
+          rand=$(( RANDOM % 2 ))
+          echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+ "build-images-chunk-16": # templated "build-images-chunk-16"
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ timeout-minutes: 180
+ if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-16 == 'yes' ) }} # <-- HERE: Chunk number.
+ strategy:
+ fail-fast: false # let other jobs try to complete if one fails
+ matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-16) }} # <-- HERE: Chunk number.
+ name: ${{ matrix.desc || 'Empty I16' }} # <-- HERE: Chunk number.
+ runs-on: ${{ matrix.runs_on }}
+ steps:
+
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/mktorrent ]; then
+ sudo apt-get update
+ sudo apt-get install -y mktorrent
+ fi
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the torrent lists
+ - name: Checkout torrent lists
+ uses: actions/checkout@v4
+ with:
+ repository: XIU2/TrackersListCollection
+ clean: false
+ ref: master
+ path: trackerslist.repo
+ fetch-depth: ${{ matrix.fdepth }}
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: ${{ matrix.fdepth }}
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ rm -rf userpatches.repo
+
+ - name: Cleanup leftover output
+ run: |
+ rm -f userpatches/VERSION
+
+ - name: ${{matrix.desc}}
+ id: build-one-image
+ timeout-minutes: 45
+ run: |
+ # calculate loop from runner name
+ if [ -z "${ImageOS}" ]; then
+ USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
+ fi
+ bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+ if: always()
+ run: |
+ echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+      - name: Check API rate limits
+        run: |
+
+          # Install the GitHub CLI if the runner image does not provide it.
+          if ! command -v "gh" > /dev/null 2>&1; then
+            sudo apt-get -y -qq install gh
+          fi
+
+          # Show the current API rate up front (previously placed after the
+          # endless loop below, where it was unreachable dead code).
+          curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
+
+          # Block this job until at least 20% of the API quota is free.
+          while true
+          do
+            # Query /rate_limit once and extract both fields from the same
+            # response, so limit/remaining cannot come from different samples.
+            RATE_JSON=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit)
+            API_CALLS_TOTAL=$(echo "${RATE_JSON}" | jq -r '.rate.limit')
+            API_CALLS_LEFT=$(echo "${RATE_JSON}" | jq -r '.rate.remaining')
+            PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
+            if (( PERCENT > 20 )); then
+              echo "API rate in good shape $PERCENT % free"
+              exit 0
+            fi
+            echo "API rate lower than 20%, sleeping 10m"
+            sleep 10m
+          done
+
+ - name: Import GPG key
+ env:
+ GPG_KEY1: ${{ secrets.GPG_KEY1 }}
+ if: env.GPG_KEY1 != null
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+      - name: Sign
+        env:
+          GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
+        if: env.GPG_PASSPHRASE1 != null
+        run: |
+
+          # Read the passphrase from the step env (already set above) and quote
+          # it: inline ${{ }} expansion into an unquoted echo would word-split
+          # or glob-expand a passphrase containing spaces/special characters.
+          echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.img*.xz
+
+ - name: Generate torrent
+ timeout-minutes: 3
+ run: |
+
+ TRACKERS=$(cat trackerslist.repo/best.txt | sed '/^\s*$/d' | sort -R | while read line; do printf " --announce=""${line}"; done)
+ BOARD=$(ls -1 output/images/ | head -1)
+ FILE=$(ls -1 output/images/*/archive/*.img*.xz | head -1 | rev | cut -d"/" -f1 | rev)
+ WEBSEEDS="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
+          # nightly images are GH only
+ rm -f servers.txt
+ if [ "${{ github.event.inputs.nightlybuild || 'no' }}" == "no" ]; then
+ # add seeds from different regions
+ #WEBSEEDS=$(curl -sq http://redirect.armbian.com/mirrors | jq -Mr '.' | grep http | tr -d \"," " | sort | uniq | sed "s|$|${BOARD}\/archive\/${FILE},|")
+
+ SERVERS=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=images&status=active" \
+ | jq '.results[] | .name' | grep -v null | sed "s/\"//g")
+ WEBSEEDS=$(for server in $SERVERS; do
+ JSON=$(curl -s -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" -H "Accept: application/json; indent=4" \
+ "https://stuff.armbian.com/netbox/api/virtualization/virtual-machines/?limit=500&name__empty=false&name=${server}" | jq)
+ SERVER_PATH=$(echo "${JSON}" | jq '.results[] | .custom_fields["download_path_images"]' | sed "s/\"//g")
+ if [ "${SERVER_PATH}" == "null" ]; then SERVER_PATH="dl"; fi
+ echo "http://$server/$SERVER_PATH/" | sed "s|$|${BOARD}\/archive\/${FILE},|"
+ done | sed 's/ \+/\n/g')
+ fi
+ cd output/images/*/archive/
+ mktorrent --comment="Armbian torrent for ${FILE}" --verbose ${TRACKERS} --web-seed="${WEBSEEDS}" ${FILE}
+
+      - name: Choose random user for upload
+        run: |
+
+          # Spread uploads across two tokens to dodge per-user API rate limits.
+          arr[0]="${{ secrets.ACCESS_TOKEN }}"
+          arr[1]="${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}"
+          # $[ ... ] arithmetic is deprecated bash syntax; use $(( ... )).
+          rand=$(( RANDOM % 2 ))
+          echo "upload_user=${arr[$rand]}" >> $GITHUB_ENV
+
+
+ - name: "Upload artefacts"
+ timeout-minutes: 15
+ #if: ${{ github.repository_owner == 'Armbian' && github.event.inputs.release != 'stable' }}
+ #if: ${{ inputs.release != 'stable' && github.event.inputs.uploadtoserver != 'yes' }}
+ #if: (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'github') || (${{ github.event.inputs.uploadtoserver || 'armbian' }} == 'both')
+ if: ${{ ( github.event.inputs.nightlybuild || 'no' ) == 'yes' }}
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: "output/images/*/*/Armbian_*.*"
+ tag: "${{ needs.matrix_prep.outputs.version }}"
+ omitBody: true
+ replacesArtifacts: false
+ omitName: true
+ makeLatest: false
+ omitPrereleaseDuringUpdate: true
+ allowUpdates: true
+ artifactErrorsFailBuild: false
+ #token: ${{ secrets.ACCESS_TOKEN }}
+ token: ${{ env.upload_user }}
+
+ - name: Deploy to server
+ timeout-minutes: 15
+ if: ${{ github.event.inputs.uploadtoserver == 'armbian' || github.event.inputs.uploadtoserver == 'both' }}
+ run: |
+
+ # generate control file which checks mirrors
+ sudo date +%s > output/images/control
+ #rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -rv output/images/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/storage2/dl
+ rsync -e "ssh -p 10023 -o StrictHostKeyChecking=accept-new" -rv output/images/ upload@rsync.armbian.com:/storage/incoming
+
+ # cleaning self hosted runners
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ if: always()
+ uses: armbian/actions/runner-clean@main
+
+# template file: 750.single_repo.yaml
+
+ # ------ publish packages to repository -------
+
+ publish-debs-to-repo:
+ name: "Download artifacts from ORAS cache"
+ runs-on: [ repository ]
+ if: ${{ !failure() && !cancelled() && github.event.inputs.targetsFilterInclude == '' && inputs.ref == '' }} # eg: run if dependencies worked. See https://github.com/orgs/community/discussions/45058#discussioncomment-4817378
+ needs: [ "matrix_prep", "all-artifacts-ready" ]
+ steps:
+
+ - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
+ uses: armbian/actions/runner-clean@main
+
+ # Prepare dependencies.
+ # If no /usr/bin/gpg, install gnupg2
+ # If no /usr/bin/reprepro, install reprepro
+ # If no /usr/bin/lftp, install lftp
+ - name: Install dependencies
+ run: |
+ if [ ! -e /usr/bin/gpg ]; then
+ sudo apt-get update
+ sudo apt-get install -y gnupg2
+ fi
+ if [ ! -e /usr/bin/reprepro ]; then
+ sudo apt-get update
+ sudo apt-get install -y reprepro
+ fi
+ if [ ! -e /usr/bin/lftp ]; then
+ sudo apt-get update
+ sudo apt-get install -y lftp
+ fi
+
+
+ # Login to ghcr.io, for later uploading rootfs to ghcr.io
+ - name: Docker Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: "${{ github.repository_owner }}" # GitHub username or org
+ password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
+
+ # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
+ - name: Cleanup userpatches repo
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: rm -rf userpatches.repo
+
+ - name: Checkout build repo
+ uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
+ with:
+ repository: ${{ env.BUILD_REPOSITORY }}
+ ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
+ fetch-depth: 0
+ clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
+
+ # clone the userpatches repo (`armbian/os`)
+ - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
+ uses: actions/checkout@v4
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ with:
+ repository: ${{ env.USERPATCHES_REPOSITORY }}
+ ref: ${{ env.USERPATCHES_REF }}
+ fetch-depth: 0
+ clean: false # true is default.
+ path: userpatches.repo
+
+ - name: "Put userpatches in place, and remove userpatches repo"
+ if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
+ run: |
+ mkdir -pv userpatches
+ rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
+ #rm -rf userpatches.repo
+
+ # Clean off output/info, if any
+ # Clean off debs and debs-beta
+ - name: Cleanup output/info
+ run: |
+ rm -rfv output/info output/debs output/debs-beta
+ mkdir -pv output
+
+ # Download the artifacts (output/info) produced by the prepare-matrix job.
+ - name: Download artifacts
+ uses: actions/download-artifact@v3
+ with:
+ name: build-info-json
+ path: output/info
+
+ # List the artifacts we downloaded
+ - name: List artifacts
+ run: |
+ ls -laht output/info
+
+ - name: Download the debs
+ id: download-debs
+ run: |
+ bash ./compile.sh debs-to-repo-download REVISION="${{ needs.matrix_prep.outputs.version }}" BETA=${{ github.event.inputs.nightlybuild || 'no' }} SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
+
+ - name: Import GPG key
+ uses: crazy-max/ghaction-import-gpg@v6
+ with:
+ gpg_private_key: ${{ secrets.GPG_KEY1 }}
+ passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
+
+ - name: Install SSH key
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.KEY_UPLOAD }}
+ known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
+ if_key_exists: replace
+
+      - name: Sync parts when making single images / maintainer
+#        if: ${{ (github.event.inputs.skipImages || 'no') == 'no' }}
+        run: |
+
+          # When images are being built (skipImages absent or 'no'), upload into
+          # the partial/ staging area; otherwise wipe the nightly repo and sync
+          # the stable package list directly.
+          # NOTE(fix): the original condition had `|| [ "'no'" == "no" ]`, which
+          # compares the literal string 'no' (with quotes) against no and is
+          # always false, so an unset skipImages wrongly took the else branch.
+          # Use the workflow-expression default instead.
+          TARGET=""
+          if [ "${{ github.event.inputs.skipImages || 'no' }}" == "no" ]; then
+            TARGET="partial/"
+          else
+            # drop nightly repository
+            sudo rm -rf /outgoing/repository-beta/*
+            # sync to stable from the list
+            rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" --include-from=userpatches.repo/stable-repo.sync \
+              --exclude='*' --progress -va output/debs-beta/. \
+              ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/incoming/${TARGET}
+          fi
+
+          echo "sync all parts"
+          rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }}" -arvc \
+            --include='debs***' \
+            --exclude='*' \
+            --remove-source-files \
+            --delete \
+            output/ ${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:/incoming/${TARGET}
+          # clean up directories emptied by --remove-source-files
+          find output/. -type d -empty -delete
+
+ - name: "Run repository update action"
+ if: ${{ (github.event.inputs.skipImages || 'no') == 'yes' }}
+ uses: peter-evans/repository-dispatch@v2
+ with:
+ token: ${{ secrets.DISPATCH }}
+ repository: armbian/os
+ event-type: "Repository update"
+
+ - name: "Logs debs-to-repo-download: ${{ steps.download-debs.outputs.logs_url }}"
+ run: |
+ echo "Logs debs-to-repo-download: ${{ steps.download-debs.outputs.logs_url }}"
+
+ outputs:
+
+ # not related to matrix
+ version: ${{ needs.matrix_prep.outputs.version }}
+
+# template file: 950.single_footer.yaml
+
+ # ------ aggregate all artifact chunks into a single dependency -------
+
+ closing:
+ name: "Footer"
+ runs-on: ubuntu-latest
+ if: ${{ !failure() && !cancelled() && inputs.ref == '' && (github.event.inputs.nightlybuild || 'no') == 'yes' }}
+ needs: [ "matrix_prep", "all-artifacts-ready", "all-images-ready" ]
+ steps:
+
+ - name: "Run webindex update action"
+ if: ${{ (github.event.inputs.skipImages || 'no') == 'no' }}
+ uses: peter-evans/repository-dispatch@v2
+ with:
+ token: ${{ secrets.DISPATCH }}
+ repository: armbian/os
+ event-type: "Webindex update"
+
+ - name: "Download all workflow run artifacts"
+ if: ${{ (github.event.inputs.skipImages || 'no') != 'yes' }}
+ uses: actions/download-artifact@v3
+ with:
+ name: assets-for-download-stable
+ path: downloads
+
+ - name: "Read version"
+ run: |
+
+ echo "version=$(cat downloads/version 2>/dev/null || true)" >> $GITHUB_ENV
+
+ # Delete artifact
+ - uses: geekyeggo/delete-artifact@v2
+ with:
+ name: assets-for-download-stable
+ failOnError: false
+
+ # Cleaning releases
+ #
+  # TODO: we need to differentiate between pre-releases and releases and optimise the cleaning process; the following action doesn't know how to handle this best
+# - uses: dev-drprasad/delete-older-releases@v0.3.2
+# with:
+# repo: armbian/os
+# keep_latest: 16
+# env:
+# GITHUB_TOKEN: "${{ env.GH_TOKEN }}"
+
+ # Cleaning logs
+ - name: "Keep only 14 days of workflow logs"
+ uses: igorjs/gh-actions-clean-workflow@v4
+ with:
+ token: "${{ env.GH_TOKEN }}"
+ days_old: 14
+
+ # Switch pre-release to release
+ - uses: ncipollo/release-action@v1
+ if: ${{ (github.event.inputs.skipImages || 'no') != 'yes' && (github.event.inputs.nightlybuild || 'no') == 'yes' }}
+ with:
+ tag: "${{ env.version }}"
+ omitBody: true
+ omitName: true
+ allowUpdates: true
+ makeLatest: true
+ token: "${{ env.GH_TOKEN }}"