# GitHub Actions workflow for "Implement standard MPI ABI" (#434).
# CI: build MPIwrapper against each MPI implementation in the matrix,
# build MPItrampoline, then compile and run hello-world programs in C,
# C++, and three Fortran dialects through the trampoline wrappers.
name: CI

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

env:
  # Single source of truth for the CMake build type; referenced by the
  # configure steps below via ${{ env.BUILD_TYPE }}.
  BUILD_TYPE: Debug

jobs:
  build:
    strategy:
      matrix:
        os: [ubuntu-22.04]
        mpi: [mpich, openmpi]
        shared: [shared-off, shared-on]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4

      - name: Install ${{ matrix.mpi }}
        # -y keeps apt-get from prompting in the non-interactive CI shell.
        run: |
          case "${{ matrix.mpi }}" in
            mpich)
              sudo apt-get update
              sudo apt-get install -y libmpich-dev
              ;;
            openmpi)
              sudo apt-get update
              sudo apt-get install -y libopenmpi-dev
              ;;
          esac

      # - name: Restore dependencies
      #   id: cache
      #   uses: actions/cache/restore@v3
      #   with:
      #     path: /usr/local
      #     key: ${{ runner.os }}-${{ matrix.mpi }}-dependencies
      #
      # - name: Build ${{ matrix.mpi }}
      #   if: steps.cache.outputs.cache-hit != 'true'
      #   run: |
      #     case "${{ matrix.mpi }}" in
      #       mpich)
      #         ;;
      #       openmpi)
      #         ./.github/workflows/install-openmpi.sh
      #         ;;
      #     esac
      #
      # - name: Save dependencies
      #   uses: actions/cache/save@v3
      #   with:
      #     path: /usr/local
      #     # NOTE(review): was `steps.cache.cache-primary-key`; the restore
      #     # action exposes this as an *output*, so `outputs.` is required.
      #     key: ${{ steps.cache.outputs.cache-primary-key }}

      - name: Configure MPIwrapper
        working-directory: ${{ github.workspace }}/mpiwrapper
        run: |
          cmake \
            -B build \
            -DCMAKE_BUILD_TYPE=${{ env.BUILD_TYPE }} \
            -DCMAKE_C_COMPILER=mpicc \
            -DCMAKE_Fortran_COMPILER=mpifort \
            -DCMAKE_INSTALL_PREFIX=$HOME/mpiwrapper-${{ matrix.mpi }}
      - name: Build MPIwrapper
        working-directory: ${{ github.workspace }}/mpiwrapper
        run: cmake --build build
      # - name: Test
      #   working-directory: ${{ github.workspace }}/build
      #   run: ctest -C ${{ env.BUILD_TYPE }}
      - name: Install MPIwrapper
        working-directory: ${{ github.workspace }}/mpiwrapper
        run: cmake --install build

      - name: Configure MPItrampoline
        working-directory: ${{ github.workspace }}/mpitrampoline
        run: |
          # "shared-on" / "shared-off" -> "on" / "off" for BUILD_SHARED_LIBS.
          shared=$(echo ${{ matrix.shared }} | sed -e 's/shared-//')
          cmake \
            -B build \
            -DBUILD_SHARED_LIBS=${shared} \
            -DCMAKE_BUILD_TYPE=${{ env.BUILD_TYPE }} \
            -DCMAKE_INSTALL_PREFIX=$HOME/mpitrampoline
      - name: Build MPItrampoline
        working-directory: ${{ github.workspace }}/mpitrampoline
        run: cmake --build build
      # - name: Test
      #   working-directory: ${{ github.workspace }}/build
      #   run: ctest -C ${{ env.BUILD_TYPE }}
      - name: Install MPItrampoline
        working-directory: ${{ github.workspace }}/mpitrampoline
        run: cmake --install build

      # Each test compiles with the trampoline's compiler wrapper, points
      # MPITRAMPOLINE_LIB at the wrapper built for the matrix MPI, and runs
      # once serially and once under mpiexec with 4 ranks.
      # (OpenMPI needs --oversubscribe since the runner may have <4 cores.)
      - name: Test C
        working-directory: ${{ github.workspace }}/test
        run: |
          $HOME/mpitrampoline/bin/mpicc -c hello-world-c.c
          $HOME/mpitrampoline/bin/mpicc -o hello-world-c hello-world-c.o
          export MPITRAMPOLINE_LIB=$HOME/mpiwrapper-${{ matrix.mpi }}/lib/libmpiwrapper.so
          case "${{ matrix.mpi }}" in
            mpich) mpiexec_options='';;
            openmpi) mpiexec_options='--oversubscribe';;
          esac
          ./hello-world-c 1
          mpiexec $mpiexec_options -n 4 ./hello-world-c 4
      - name: Test C++
        working-directory: ${{ github.workspace }}/test
        run: |
          $HOME/mpitrampoline/bin/mpicxx -c hello-world-cxx.cxx
          $HOME/mpitrampoline/bin/mpicxx -o hello-world-cxx hello-world-cxx.o
          export MPITRAMPOLINE_LIB=$HOME/mpiwrapper-${{ matrix.mpi }}/lib/libmpiwrapper.so
          case "${{ matrix.mpi }}" in
            mpich) mpiexec_options='';;
            openmpi) mpiexec_options='--oversubscribe';;
          esac
          ./hello-world-cxx 1
          mpiexec $mpiexec_options -n 4 ./hello-world-cxx 4
      - name: Test fixed form Fortran
        working-directory: ${{ github.workspace }}/test
        run: |
          $HOME/mpitrampoline/bin/mpifc -c hello-world-f.f
          $HOME/mpitrampoline/bin/mpifc -o hello-world-f hello-world-f.o
          export MPITRAMPOLINE_LIB=$HOME/mpiwrapper-${{ matrix.mpi }}/lib/libmpiwrapper.so
          case "${{ matrix.mpi }}" in
            mpich) mpiexec_options='';;
            openmpi) mpiexec_options='--oversubscribe';;
          esac
          ./hello-world-f 1
          mpiexec $mpiexec_options -n 4 ./hello-world-f 4
      - name: Test free form Fortran
        working-directory: ${{ github.workspace }}/test
        run: |
          $HOME/mpitrampoline/bin/mpifc -c hello-world-f90.f90
          $HOME/mpitrampoline/bin/mpifc -o hello-world-f90 hello-world-f90.o
          export MPITRAMPOLINE_LIB=$HOME/mpiwrapper-${{ matrix.mpi }}/lib/libmpiwrapper.so
          case "${{ matrix.mpi }}" in
            mpich) mpiexec_options='';;
            openmpi) mpiexec_options='--oversubscribe';;
          esac
          ./hello-world-f90 1
          mpiexec $mpiexec_options -n 4 ./hello-world-f90 4
      - name: Test modern Fortran
        working-directory: ${{ github.workspace }}/test
        run: |
          $HOME/mpitrampoline/bin/mpifc -c hello-world-fortran.f90
          $HOME/mpitrampoline/bin/mpifc -o hello-world-fortran hello-world-fortran.o
          export MPITRAMPOLINE_LIB=$HOME/mpiwrapper-${{ matrix.mpi }}/lib/libmpiwrapper.so
          case "${{ matrix.mpi }}" in
            mpich) mpiexec_options='';;
            openmpi) mpiexec_options='--oversubscribe';;
          esac
          ./hello-world-fortran 1
          mpiexec $mpiexec_options -n 4 ./hello-world-fortran 4