mcfost: HDF5ROOT to HDF5_DIR #65

Workflow file for this run

name: build
on:
  push:
    branches: [ master ]
    paths-ignore:
      - 'docs/**'
      - 'README.md'
  pull_request:
    branches: [ master ]
    paths-ignore:
      - 'docs/**'
      - 'README.md'
#  schedule:
#    - cron: "0 0 * * *"
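# Environment shared by every job; the WEB_* and BUILD_LOG_DIR settings point at
# the server used for publishing nightly build logs.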
env:
  OMP_STACKSIZE: 512M
  SPLASH_DIR: ${{ github.workspace }}/splash
  PHANTOM_DIR: ${{ github.workspace }}
  WEB_USER: github
  WEB_SERVER: data.phantom.cloud.edu.au
  WEB_HTML_DIR: /var/www/html
  BUILD_LOG_DIR: /ci/build/logs
  RSYNC_RSH: ssh -o "StrictHostKeyChecking=no" -o "UserKnownHostsFile=/dev/null"
  NPARALLEL: 32
jobs:
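  # matrix_prep decides how the build matrix is split: pushes and pull requests
  # spread the builds across NPARALLEL parallel batches, while scheduled
  # (nightly) runs build everything sequentially in a single batch.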
  matrix_prep:
    # Skip scheduled runs on forks
    if: ${{ github.event_name != 'schedule' || github.repository == 'danieljprice/phantom' }}
    runs-on: ubuntu-latest
    outputs:
      batch: ${{ steps.set-sequence.outputs.batch }}
      nbatch: ${{ steps.set-sequence.outputs.nbatch }}
    steps:
      - name: Check out repo
        uses: actions/checkout@v3
      - name: Generate sequence of batch numbers for normal tests, or run sequentially for scheduled tests
        id: set-sequence
        run: |
          if [[ "${{ github.event_name }}" == "schedule" ]]; then
            echo "::set-output name=batch::[1]"
            echo "::set-output name=nbatch::1"
          else
            range=$(jq -ncR "[range(1;${NPARALLEL}+1)]")
            echo "::set-output name=batch::${range}"
            echo "::set-output name=nbatch::${NPARALLEL}"
          fi
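  # build compiles this batch's share of the phantom SETUPs with both the
  # gfortran and ifort toolchains via scripts/buildbot.sh.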
  build:
    needs: matrix_prep
    strategy:
      fail-fast: false
      matrix:
        system:
          - gfortran
          - ifort
        batch: ${{ fromJson(needs.matrix_prep.outputs.batch) }}
    name: build (batch ${{ matrix.batch }}/${{ needs.matrix_prep.outputs.nbatch }}, SYSTEM=${{ matrix.system }})
    runs-on: ubuntu-latest
    steps:
      - name: "Nuke the github workspace before doing anything"
        run: rm -r ${{ github.workspace }} && mkdir ${{ github.workspace }}
      - name: Setup Intel repo
        if: matrix.system == 'ifort'
        id: intel-repo
        run: |
          wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          echo "deb https://apt.repos.intel.com/oneapi all main" | sudo tee /etc/apt/sources.list.d/oneAPI.list
          sudo apt-get update
          INTELVERSION=$(apt-cache show intel-oneapi-compiler-fortran | grep Version | head -1)
          echo "::set-output name=intelversion::$INTELVERSION"
      - name: Cache intel installation
        if: matrix.system == 'ifort'
        id: cache-intel
        uses: actions/cache@v3
        with:
          path: |
            /opt/intel
          key: ${{ steps.intel-repo.outputs.intelversion }}
      - name: Install Intel compilers
        if: ${{ steps.cache-intel.outputs.cache-hit != 'true' && matrix.system == 'ifort' }}
        run: |
          sudo apt-get install -y intel-oneapi-common-vars
          sudo apt-get install -y intel-oneapi-compiler-fortran
          sudo apt-get install -y intel-oneapi-mpi
          sudo apt-get install -y intel-oneapi-mpi-devel
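      # setvars.sh only affects the step that sources it, so the resulting
      # environment is written to $GITHUB_ENV to make the Intel compilers
      # visible to all later steps in this job.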
      - name: Setup Intel oneAPI environment
        if: matrix.system == 'ifort'
        run: |
          source /opt/intel/oneapi/setvars.sh
          printenv >> $GITHUB_ENV
      - name: Install numpy and matplotlib for analysis unit tests
        run: |
          sudo apt-get install -y python3-numpy
          sudo apt-get install -y python3-matplotlib
      - name: "Clone phantom"
        uses: actions/checkout@v3
      - name: "Grab a copy of splash source code"
        uses: actions/checkout@v3
        with:
          repository: danieljprice/splash
          path: splash
      - name: "Create logs directory"
        if: github.event_name == 'schedule'
        run: mkdir logs
      # - name: "Grab previous build logs from web server"
      #   if: github.event_name == 'schedule'
      #   env:
      #     WGET: wget --recursive --no-parent --reject "index.html*" --cut-dirs=2 --no-host-directories
      #   run: ${WGET} -A '*${{ matrix.system[1] }}.txt' http://${WEB_SERVER}${BUILD_LOG_DIR}/ || true
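      # buildbot.sh does the actual building: --parallel <batch> <nbatch> restricts
      # this job to its batch's share of the SETUP list, and --maxdim presumably
      # caps the particle array size so the builds fit in the runner's memory.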
- name: "Run buildbot.sh"
run: ./buildbot.sh --maxdim 17000000 --url http://${WEB_SERVER}/${BUILD_LOG_DIR} --parallel ${{ matrix.batch }} ${{ env.NPARALLEL }}
working-directory: scripts
env:
SYSTEM: ${{ matrix.system }}
RETURN_ERR: yes
# - name: "Install SSH Key"
# if: github.event_name == 'schedule'
# uses: webfactory/ssh-agent@v0.5.3
# with:
# ssh-private-key: ${{ secrets.RUNNER_PRIVATE_KEY }}
# - name: "Copy new build logs to web server"
# if: ${{ (success() || failure()) && github.event_name == 'schedule' }}
# run: rsync -vau logs/*.txt ${WEB_USER}@${WEB_SERVER}:${WEB_HTML_DIR}/${BUILD_LOG_DIR}
# - name: "Copy HTML files to web server"
# if: ${{ (success() || failure()) && github.event_name == 'schedule' }}
# run: |
# export WEB_BUILD_DIR=${WEB_HTML_DIR}/nightly/build/$(date "+%Y%m%d")
# ssh -o "StrictHostKeyChecking=no" -o "UserKnownHostsFile=/dev/null" ${WEB_USER}@${WEB_SERVER} -- mkdir -p ${WEB_BUILD_DIR}
# rsync -vau logs/*.html ${WEB_USER}@${WEB_SERVER}:${WEB_BUILD_DIR}/
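      # The remaining steps print any failure logs written by buildbot.sh straight
      # into the Actions log, so failures can be inspected without downloading
      # artifacts.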
      - name: logs/build-failures-${{ matrix.system }}.txt
        if: always()
        run: cat logs/build-failures-${{ matrix.system }}.txt || true
      - name: logs/setup-failures-${{ matrix.system }}.txt
        if: always()
        run: cat logs/setup-failures-${{ matrix.system }}.txt || true
      - name: logs/analysis-failures-${{ matrix.system }}.txt
        if: always()
        run: cat logs/analysis-failures-${{ matrix.system }}.txt || true
      - name: logs/make-*-${{ matrix.system }}.txt
        if: always()
        run: |
          echo
          for item in $(ls logs/make-*-${{ matrix.system }}.txt); do
            echo ::group::"${item}"
            cat $item
            echo ::endgroup::
          done
  # Gather results into a dummy job that will fail if the previous job fails
  gather_results:
    if: ${{ always() && github.event_name != 'schedule' }}
    needs:
      - build
    # This name matches the branch protection requirement
    name: build
    # Always run on the github runner; no need to use a custom runner for this check
    runs-on: ubuntu-latest
    steps:
      - name: Check all builds succeeded
        run: |
          if [[ "${{ needs.build.result }}" == "success" ]]; then
            echo "All builds succeeded"
          else
            echo "At least one build failed"
            exit 1
          fi