V2: Make CI complain when notebook tests fail #469

Workflow file for this run

# ci.yml
#
# Continuous Integration for Chemprop - checks build, code formatting, and runs tests for all
# proposed changes and on a regular schedule
#
# Note: this file contains extensive inline documentation to aid with knowledge transfer.
name: Continuous Integration
on:
  # run on pushes/pull requests to/against main
  push:
    branches: [main]
  pull_request:
    branches: [main]
  # run this in the morning on weekdays to catch dependency issues
  schedule:
    - cron: "0 8 * * 1-5"
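  # (cron fields are minute, hour, day-of-month, month, day-of-week;
  # "0 8 * * 1-5" fires at 08:00 UTC, Monday through Friday)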
  # allow manual runs
  workflow_dispatch:
# cancel previously running tests if new commits are made
# https://docs.github.com/en/actions/examples/using-concurrency-expressions-and-a-test-matrix
concurrency:
  group: actions-id-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
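# (illustration: for pull request #469 this group evaluates to
# "actions-id-Continuous Integration-469", so a new push to the PR cancels the
# previous in-flight run; non-PR events fall back to the ref name instead)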
jobs:
  build:
    name: Check Build
    runs-on: ubuntu-latest
    steps:
      # clone the repo, attempt to build
      - uses: actions/checkout@v4
      - run: python -m pip install build
      - run: python -m build .
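      # (note: `python -m build .` produces both an sdist and a wheel under
      # dist/, so this step fails if the packaging metadata cannot build either)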
  lint:
    name: Check Formatting
    needs: build
    runs-on: ubuntu-latest
    steps:
      # clone the repo, run black and flake8 on it
      - uses: actions/checkout@v4
      - run: python -m pip install black==23.* flake8
      - run: black --check .
      - run: flake8 .
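      # (note: `black --check` only reports files it would reformat, without
      # rewriting them, and exits nonzero if any are found, failing this job)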
  test:
    name: Execute Tests
    needs: lint
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        # run with a login shell (so that the conda environment is activated)
        # and exit immediately if any command fails (the -e flag)
        shell: bash -el {0}
    strategy:
      # if one platform/python version fails, continue testing the others
      fail-fast: false
      matrix:
        # test on all platforms with both supported versions of Python
        os: [ubuntu-latest, macos-latest, windows-latest]
        python-version: [3.11, 3.12]
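    # (this matrix expands to 3 platforms x 2 Python versions = 6 independent jobs)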
    steps:
      - uses: actions/checkout@v4
      # use a version of the conda virtual environment manager to set up an
      # isolated environment with the Python version we want
      - uses: mamba-org/setup-micromamba@main
        with:
          environment-name: temp
          condarc: |
            channels:
              - defaults
              - conda-forge
            channel_priority: flexible
          create-args: |
            python=${{ matrix.python-version }}
      - name: Install dependencies
        shell: bash -l {0}
        run: |
          python -m pip install nbmake
          python -m pip install ".[dev,docs,test]"
      - name: Install hyperopt optional dependencies (Python 3.11 only)
        if: ${{ matrix.python-version == '3.11' }}
        run: |
          python -m pip install ".[hpopt]"
      # - name: Test with pytest
      #   shell: bash -l {0}
      #   run: |
      #     pytest -v tests
      - name: Test notebooks
        shell: bash -l {0}
        run: |
          pytest --no-cov --nbmake examples/*.ipynb
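      # (nbmake executes each example notebook top-to-bottom as a pytest test,
      # so any cell that raises fails the job; --no-cov disables pytest-cov's
      # coverage collection, which is not meaningful for notebook runs)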
  conda-test:
    name: Execute Tests with Conda-based Install
    needs: lint
    runs-on: ubuntu-latest
    defaults:
      run:
        # run with a login shell (so that the conda environment is activated)
        # and exit immediately if any command fails (the -e flag)
        shell: bash -el {0}
    strategy:
      # if one python version fails, continue testing the other
      fail-fast: false
      matrix:
        python-version: [3.11, 3.12]
    steps:
      - uses: actions/checkout@v4
      # use a version of the conda virtual environment manager to set up an
      # isolated environment with the Python version we want
      - uses: mamba-org/setup-micromamba@main
        with:
          environment-file: environment.yml
          condarc: |
            channels:
              - defaults
              - conda-forge
              - pytorch
            channel_priority: flexible
          create-args: |
            python=${{ matrix.python-version }}
      - name: Install Testing Dependencies
        # quote the version specifier so the shell does not parse ">=" as a redirect
        run: conda install -c conda-forge "pytest>=6.2" pytest-cov
      - name: Install Chemprop
        run: python -m pip install . --no-deps
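      # (note: --no-deps installs only Chemprop itself, so every runtime
      # dependency comes from the conda environment built from environment.yml)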
      - name: Test with pytest
        shell: bash -l {0}
        run: |
          pytest -v tests
  # GitHub allows you to set checks as required before PRs can be merged, which is awkward
  # for matrix jobs like the one above, which expands to six actual jobs. Instead we have a
  # summary job like this one, which we mark as required.
  # Copied in part from:
  # https://github.com/orgs/community/discussions/26822#discussioncomment-3305794
  ci-report-status:
    name: report CI status
    needs: test
    runs-on: ubuntu-latest
    steps:
      - run: |
          result="${{ needs.test.result }}"
          # quote the variable and use POSIX "=" so the comparison cannot break
          # on an empty result
          if [ "$result" = "success" ]; then
            exit 0
          else
            exit 1
          fi
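# (note: `needs: test` means only the pip-based test matrix feeds this required
# check; the conda-test job still runs and reports, but does not gate merges)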