[ENH] add caching action for elephant-data #889
Workflow file for this run
# This workflow will set up GitHub-hosted runners and install the required dependencies for elephant tests.
# On pull requests and on pushes to master it runs the different elephant test jobs defined below.
name: tests
# define events that trigger workflow 'tests'
on:
  workflow_dispatch: # enables manual triggering of workflow
    inputs:
      logLevel:
        description: 'Log level'
        required: true
        default: 'warning'
        type: choice
        options:
          - info
          - warning
          - debug
  pull_request:
    branches:
      - master
    types:
      #- assigned
      #- unassigned
      #- labeled
      #- unlabeled
      - opened
      #- edited
      #- closed
      - reopened
      - synchronize
      #- converted_to_draft
      #- ready_for_review
      #- locked
      #- unlocked
      #- review_requested
      #- review_request_removed
      #- auto_merge_enabled
      #- auto_merge_disabled
  push:
    branches:
      - master
# Cancel previous workflows on the same pull request
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
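# For a pull request the concurrency group evaluates to e.g. 'tests-refs/pull/<PR number>/merge',
# so pushing new commits to the same pull request cancels the run started for the previous commit.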
# jobs define the steps that will be executed on the runner
jobs:
  #        _
  #  _ __ (_)_ __
  # | '_ \| | '_ \
  # | |_) | | |_) |
  # | .__/|_| .__/
  # |_|    |_|
  # install dependencies and elephant with pip and run tests with pytest
  build-and-test-pip:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.8, 3.9, "3.10", 3.11, 3.12]
        python-version: [3.9, "3.10", 3.11, 3.12]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [ubuntu-latest]
      # do not cancel all in-progress jobs if any matrix job fails
      fail-fast: false
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
          cache: 'pip'
          cache-dependency-path: |
            **/requirements.txt
            **/requirements-extras.txt
            **/requirements-tests.txt
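      # The next two steps restore a cached copy of the elephant-data repository:
      # the HEAD SHA of the remote dataset repo is part of the cache key, so the cache
      # is only invalidated when the datasets on gin.g-node.org are updated; restore-keys
      # falls back to the most recent '<os>-datasets-' entry if there is no exact match.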
      - name: Get current hash (SHA) of the elephant-data repo
        id: elephant-data
        run: |
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/elephant-data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
      - uses: actions/cache/restore@v3
        # Loading cache of elephant-data
        id: cache-datasets
        with:
          path: ~/elephant-data
          key: ${{ runner.os }}-datasets-${{ steps.elephant-data.outputs.dataset_hash }}
          restore-keys: ${{ runner.os }}-datasets-
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install coveralls
          pip install -e .[extras,tests]
      - name: List packages
        run: |
          pip list
          python --version
      - name: Test with pytest
        run: |
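          # If the dataset cache was restored above, point the tests at the local copy
          # via ELEPHANT_DATA_PATH so they do not have to download the datasets again.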
          if [ -d ~/elephant-data ]; then
            export ELEPHANT_DATA_PATH=~/elephant-data
            echo $ELEPHANT_DATA_PATH
          fi
          coverage run --source=elephant -m pytest
          coveralls --service=github || echo "Coveralls submission failed"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  #                        ___  ____
  #  _ __ ___   __ _  ___ / _ \/ ___|
  # | '_ ` _ \ / _` |/ __| | | \___ \
  # | | | | | | (_| | (__| |_| |___) |
  # |_| |_| |_|\__,_|\___|\___/|____/
  test-macOS:
    name: conda (${{ matrix.python-version }}, ${{ matrix.os }})
    runs-on: ${{ matrix.os }}
    strategy:
      # do not cancel all in-progress jobs if any matrix job fails
      fail-fast: false
      matrix:
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [macos-11, macos-12]
        python-version: [3.11]
    steps:
      - name: Get current year-month
        id: date
        run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
      - uses: actions/checkout@v3
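      # Cache downloaded conda packages; the year-month suffix in the key forces a
      # full refresh of the cache once per month even if the requirement files are unchanged.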
      - name: Cache conda
        uses: actions/cache@v3
        with:
          path: ~/conda_pkgs_dir
          key: ${{ runner.os }}-conda-${{ hashFiles('requirements/environment.yml') }}-${{ hashFiles('**/CI.yml') }}-${{ steps.date.outputs.date }}
      - name: Get current hash (SHA) of the elephant-data repo
        id: elephant-data
        run: |
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/elephant-data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
      - uses: actions/cache/restore@v3
        # Loading cache of elephant-data
        id: cache-datasets
        with:
          path: ~/elephant-data
          key: ${{ runner.os }}-datasets-${{ steps.elephant-data.outputs.dataset_hash }}
          restore-keys: ${{ runner.os }}-datasets-
      - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # corresponds to v3.0.3
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python-version }}
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          activate-environment: elephant
          environment-file: requirements/environment-tests.yml
      - name: Install dependencies
        shell: bash -l {0}
        run: |
          python --version
          mamba install pytest pytest-cov coveralls
          pip install -e .[extras]
      - name: List packages
        shell: bash -l {0}
        run: |
          pip list
          mamba list
          python --version
      - name: Test with pytest
        shell: bash -l {0}
        run: |
          if [ -d ~/elephant-data ]; then
            export ELEPHANT_DATA_PATH=~/elephant-data
            echo $ELEPHANT_DATA_PATH
          fi
          pytest --cov=elephant
  #  __        ___           _
  #  \ \      / (_)_ __   __| | _____      _____
  #   \ \ /\ / /| | '_ \ / _` |/ _ \ \ /\ / / __|
  #    \ V  V / | | | | | (_| | (_) \ V  V /\__ \
  #     \_/\_/  |_|_| |_|\__,_|\___/ \_/\_/ |___/
  # install dependencies with pip and run tests with pytest
  test-pip:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.8, 3.9, 3.10, 3.11]
        python-version: [3.11]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [windows-latest]
    steps:
      - name: Get current year-month
        id: date
        run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Cache pip
        uses: actions/cache@v3
        with:
          path: ~\AppData\Local\pip\Cache
          # Look to see if there is a cache hit for the corresponding requirements files
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}-${{ hashFiles('**/requirements-tests.txt') }}
            -${{ hashFiles('**/requirements-extras.txt') }}-${{ hashFiles('setup.py') }} -${{ hashFiles('**/CI.yml') }}-${{ steps.date.outputs.date }}
      - name: Get current hash (SHA) of the elephant-data repo
        id: elephant-data
        run: |
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/elephant-data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
      - uses: actions/cache/restore@v3
        # Loading cache of elephant-data
        id: cache-datasets
        with:
          path: ~/elephant-data
          key: ${{ runner.os }}-datasets-${{ steps.elephant-data.outputs.dataset_hash }}
          restore-keys: ${{ runner.os }}-datasets-
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pytest-cov coveralls
          pip install -e .[extras,tests]
      - name: List packages
        run: |
          pip list
          python --version
      - name: Test with pytest
        run: |
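          # The cache step above restores the datasets to ~/elephant-data, which on a
          # Windows runner should resolve to the user profile directory; expose it to
          # the tests only when it exists.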
          if (Test-Path "$env:USERPROFILE\elephant-data") {
            $env:ELEPHANT_DATA_PATH = "$env:USERPROFILE\elephant-data"
            Write-Output $env:ELEPHANT_DATA_PATH
          }
          pytest --cov=elephant
  #  __  __ ____ ___
  # |  \/  |  _ \_ _|
  # | |\/| | |_) | |
  # | |  | |  __/| |
  # |_|  |_|_|  |___|
  # install dependencies and elephant with pip and run MPI
  test-pip-MPI:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.8, 3.9, 3.10, 3.11]
        python-version: [3.9]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [ubuntu-latest]
      # do not cancel all in-progress jobs if any matrix job fails
      fail-fast: false
    steps:
      - name: Get current year-month
        id: date
        run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
      - name: Cache test_env
        uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          # look to see if there is a cache hit for the corresponding requirements files
          # cache will be reset on changes to any requirements or every month
          key: ${{ runner.os }}-venv-${{ hashFiles('**/requirements.txt') }}-${{ hashFiles('**/requirements-tests.txt') }}
            -${{ hashFiles('**/requirements-extras.txt') }}-${{ hashFiles('setup.py') }} -${{ hashFiles('**/CI.yml') }}-${{ steps.date.outputs.date }}
      - name: Get current hash (SHA) of the elephant-data repo
        id: elephant-data
        run: |
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/elephant-data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
      - uses: actions/cache/restore@v3
        # Loading cache of elephant-data
        id: cache-datasets
        with:
          path: ~/elephant-data
          key: ${{ runner.os }}-datasets-${{ steps.elephant-data.outputs.dataset_hash }}
          restore-keys: ${{ runner.os }}-datasets-
      - name: Setup environment
        run: |
          sudo apt-get update
          sudo apt install -y libopenmpi-dev openmpi-bin
          python -m pip install --upgrade pip
          pip install mpi4py
          pip install pytest-cov coveralls
          pip install -e .[extras,tests]
      - name: List packages
        run: |
          pip list
          python --version
      - name: Test with pytest
        run: |
          if [ -d ~/elephant-data ]; then
            export ELEPHANT_DATA_PATH=~/elephant-data
            echo $ELEPHANT_DATA_PATH
          fi
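          # Run the test suite on a single MPI rank; 'python -m mpi4py' aborts the MPI job
          # on an uncaught exception instead of hanging, and coverage is collected as usual.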
          mpiexec -n 1 python -m mpi4py -m coverage run --source=elephant -m pytest
          coveralls --service=github || echo "Coveralls submission failed"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  #   ____                _
  #  / ___|___  _ __   __| | __ _
  # | |   / _ \| '_ \ / _` |/ _` |
  # | |__| (_) | | | | (_| | (_| |
  #  \____\___/|_| |_|\__,_|\__,_|
  # install dependencies with conda and run tests with pytest
  test-conda:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.8, 3.9, 3.10, 3.11]
        python-version: [3.11]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [ubuntu-latest]
      # do not cancel all in-progress jobs if any matrix job fails
      fail-fast: false
    steps:
      - name: Get current year-month
        id: date
        run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
      - uses: actions/checkout@v3
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
      - name: Cache pip
        uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements/environment-tests.yml') }}-${{ hashFiles('**/CI.yml') }}-${{ steps.date.outputs.date }}
      - name: Get current hash (SHA) of the elephant-data repo
        id: elephant-data
        run: |
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/elephant-data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
      - uses: actions/cache/restore@v3
        # Loading cache of elephant-data
        id: cache-datasets
        with:
          path: ~/elephant-data
          key: ${{ runner.os }}-datasets-${{ steps.elephant-data.outputs.dataset_hash }}
          restore-keys: ${{ runner.os }}-datasets-
      - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # corresponds to v3.0.3
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python-version }}
          mamba-version: "*"
          channels: conda-forge,defaults
          channel-priority: true
          activate-environment: elephant
          environment-file: requirements/environment-tests.yml
      - name: Install dependencies
        shell: bash -el {0}
        run: |
          python --version
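          # Install OpenMPI and mpi4py from conda so that mpi4py is linked against the
          # conda-provided MPI library rather than a system one.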
          conda install mpi4py openmpi
          mamba install pytest pytest-cov coveralls
          pip install -e .
      - name: List packages
        shell: bash -el {0}
        run: |
          pip list
          mamba list
          python --version
      - name: Test with pytest
        shell: bash -el {0}
        run: |
          if [ -d ~/elephant-data ]; then
            export ELEPHANT_DATA_PATH=~/elephant-data
            echo $ELEPHANT_DATA_PATH
          fi
          pytest --cov=elephant
  #  ____
  # |  _ \  ___   ___ ___
  # | | | |/ _ \ / __/ __|
  # | |_| | (_) | (__\__ \
  # |____/ \___/ \___|___/
  # install dependencies for the documentation and build .html
  docs:
    name: docs (${{ matrix.os }})
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.8, 3.9, 3.10, 3.11, 3.12]
        python-version: [3.12]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [ubuntu-latest]
    steps:
      - name: Get current year-month
        id: date
        run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
      - uses: actions/checkout@v3
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
      - name: Cache pip
        uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          # Look to see if there is a cache hit for the corresponding requirements files
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements-docs.txt') }}-${{ hashFiles('**/requirements-tutorials.txt') }}-${{ hashFiles('**/environment-docs.yml') }}
            -${{ hashFiles('**/CI.yml') }}-${{ steps.date.outputs.date }}
      - uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # corresponds to v3.0.3
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python-version }}
          mamba-version: "*"
          activate-environment: elephant
          environment-file: requirements/environment.yml
      - name: Install dependencies
        shell: bash -el {0} # enables conda incubator to activate environment
        run: |
          sudo apt-get update
          sudo apt install -y libopenmpi-dev openmpi-bin
          conda install -c conda-forge openmpi pandoc libstdcxx-ng # fix libstdc++.so.6: version for new scipy versions > 1.9.1
          mamba env update --file requirements/environment-docs.yml --name elephant
          python -m pip install --upgrade pip
          pip install -e .[extras,tutorials,docs]
          # run notebooks: force nbsphinx to execute the tutorial notebooks when building the docs
          sed -i -E "s/nbsphinx_execute *=.*/nbsphinx_execute = 'always'/g" doc/conf.py
      - name: List packages
        shell: bash -el {0}
        run: |
          pip list
          mamba list
          python --version
      - name: make html
        shell: bash -el {0}
        run: |
          cd doc
          make html
  # install dependencies and elephant with pip and run doctests with pytest
  doctests:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # python versions for elephant: [3.7, 3.8, 3.9, "3.10"]
        python-version: ["3.10"]
        # OS [ubuntu-latest, macos-latest, windows-latest]
        os: [ubuntu-latest]
    steps:
      # used to reset cache every month
      - name: Get current year-month
        id: date
        run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Cache test_env
        uses: actions/cache@v3
        with:
          path: ~/test_env
          # Look to see if there is a cache hit for the corresponding requirements files
          # cache will be reset on changes to any requirements or every month
          key: ${{ runner.os }}-venv-${{ hashFiles('**/requirements.txt') }}-${{ hashFiles('**/requirements-tests.txt') }}
            -${{ hashFiles('**/requirements-extras.txt') }}-${{ hashFiles('**/CI.yml') }}-${{ hashFiles('setup.py') }}
            -${{ steps.date.outputs.date }}
      - name: Install dependencies
        run: |
          # create an environment and install everything
          python -m venv ~/test_env
          source ~/test_env/bin/activate
          sudo apt install -y libopenmpi-dev openmpi-bin
          python -m pip install --upgrade pip
          pip install mpi4py
          pip install pytest-cov coveralls
          pip install -e .[extras,tests]
      - name: List packages
        run: |
          source ~/test_env/bin/activate
          pip list
          python --version
      - name: Run doctests
        run: |
          source ~/test_env/bin/activate
          pytest elephant --doctest-modules --ignore=elephant/test/