Add log and step attributes to integer and float hyperparameter #668
Workflow file for this run
name: Build discrete-optimization

env:
  MAIN_REPO_NAME: 'airbus/discrete-optimization'

on:
  push:
    branches:
      - "**"
    tags:
      - 'v[0-9]+.[0-9]+.[0-9]+'
  pull_request:
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  linters:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: create requirements.txt so that pip cache with setup-python works
        run: echo "pre-commit" > requirements_precommit.txt
      - uses: actions/setup-python@v5
        with:
          python-version: "3.8"
          cache: "pip"
          cache-dependency-path: requirements_precommit.txt
      - name: install pre-commit
        run: python -m pip install pre-commit
      - name: get cached pre-commit hooks
        uses: actions/cache@v4
        with:
          path: ~/.cache/pre-commit
          key: pre-commit|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml') }}
      - name: pre-commit checks
        run: pre-commit run --show-diff-on-failure --color=always --all-files

  trigger:
    # store trigger reason
    runs-on: ubuntu-latest
    outputs:
      is_release: ${{ steps.reason.outputs.is_release }}
      is_push_on_default_branch: ${{ steps.reason.outputs.is_push_on_default_branch }}
    steps:
      - id: reason
        run: |
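          # a v* tag marks a release; a push on the default branch is what later gates doc deployment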
echo "is_release=${{ startsWith(github.ref, 'refs/tags/v') }}" >> $GITHUB_OUTPUT | |
echo "is_push_on_default_branch=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }}" >> $GITHUB_OUTPUT | |

  build:
    runs-on: ubuntu-latest
    needs: trigger
    outputs:
      do_version: ${{ steps.get_library_version.outputs.version }}
    steps:
      - name: Checkout source code
        uses: actions/checkout@v4
      - name: create requirements.txt so that pip cache with setup-python works
        run:
          echo "build" > requirements_build.txt
      - uses: actions/setup-python@v5
        with:
          python-version: "3.8"
          cache: "pip"
          cache-dependency-path: requirements_build.txt
      - name: Install build dependencies
        run: pip install -U build
      - name: Build discrete-optimization (wheel + source code)
        run: python -m build
      - name: Upload as build artifacts
        uses: actions/upload-artifact@v4
        with:
          name: dist
          path: dist
      - name: get library version and save it
        id: get_library_version
        run: |
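          # wheel filenames follow <name>-<version>-<tags>.whl, so the second dash-separated field is the version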
          wheelfile=$(ls ./dist/discrete_optimization*.whl)
          version=$(python -c "print('$wheelfile'.split('-')[1])")
          echo "version=$version"
          echo "version=$version" >> $GITHUB_OUTPUT

  test:
    needs: build
    strategy:
      fail-fast: false
      matrix:
        os: ["ubuntu-latest", "macos-12", "macos-latest", "windows-latest"]
        python-version: ["3.8", "3.11"]
        wo_gurobi: ["", "without gurobi"]
        include:
          - os: "ubuntu-latest"
            minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/squashfs-root/usr/bin; export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/bin/squashfs-root/usr/lib
            minizinc_cache_path: $(pwd)/bin/squashfs-root
            minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.8.3/MiniZincIDE-2.8.3-x86_64.AppImage
            minizinc_downloaded_filepath: bin/minizinc.AppImage
            minizinc_install_cmdline: cd bin; sudo chmod +x minizinc.AppImage; sudo ./minizinc.AppImage --appimage-extract; cd ..
            minizinc_prerequisites_cmdline: sudo apt update && sudo apt install libegl1 -y
          - os: "macos-12"
            minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/MiniZincIDE.app/Contents/Resources
            minizinc_cache_path: $(pwd)/bin/MiniZincIDE.app
            minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.8.3/MiniZincIDE-2.8.3-bundled.dmg
            minizinc_downloaded_filepath: bin/minizinc.dmg
            minizinc_install_cmdline: sudo hdiutil attach bin/minizinc.dmg; sudo cp -R /Volumes/MiniZinc*/MiniZincIDE.app bin/.
            minizinc_prerequisites_cmdline: ""
          - os: "macos-latest"
            minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/MiniZincIDE.app/Contents/Resources
            minizinc_cache_path: $(pwd)/bin/MiniZincIDE.app
            minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.8.3/MiniZincIDE-2.8.3-bundled.dmg
            minizinc_downloaded_filepath: bin/minizinc.dmg
            minizinc_install_cmdline: sudo hdiutil attach bin/minizinc.dmg; sudo cp -R /Volumes/MiniZinc*/MiniZincIDE.app bin/.
            minizinc_prerequisites_cmdline: ""
          - os: "windows-latest"
            minizinc_config_cmdline: export PATH=$PATH:~/AppData/Local/Programs/MiniZinc
            minizinc_cache_path: ~/AppData/Local/Programs/MiniZinc
            minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.8.3/MiniZincIDE-2.8.3-bundled-setup-win64.exe
            minizinc_downloaded_filepath: minizinc_setup.exe
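            # note: the doubled slash in `cmd //c` keeps Git Bash from rewriting /c as a Windows path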
            minizinc_install_cmdline: cmd //c "minizinc_setup.exe /verysilent /currentuser /norestart /suppressmsgboxes /sp"
            minizinc_prerequisites_cmdline: ""
          - coverage: false  # generally no coverage to avoid multiple reports
          - coverage: true  # coverage only for one entry of the matrix
            os: "ubuntu-latest"
            python-version: "3.11"
            wo_gurobi: ""
        exclude:
          - os: "windows-latest"
            wo_gurobi: "without gurobi"
          - os: "macos-12"
            wo_gurobi: "without gurobi"
          - os: "macos-latest"
            wo_gurobi: "without gurobi"
          - os: "ubuntu-latest"
            wo_gurobi: "without gurobi"
            python-version: "3.8"
          - os: "macos-latest"
            python-version: "3.8"
          - os: "macos-12"
            python-version: "3.11"
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout discrete-optimization source code
        uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
          cache-dependency-path: pyproject.toml
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          name: dist
          path: dist
      - name: Prevent installing newest release of ortools on windows (segmentation fault)
        if: matrix.os == 'windows-latest'
        run: |
          python -m pip install -U pip
          pip install "ortools<9.10"
      - name: Install only discrete-optimization
        run: |
          python -m pip install -U pip
          wheelfile=$(ls ./dist/discrete_optimization*.whl)
          pip install ${wheelfile}
      - name: Check import works without minizinc if DO_SKIP_MZN_CHECK set
        run: |
          export DO_SKIP_MZN_CHECK=1
          python -c "import discrete_optimization"
      - name: Check import fails without minizinc if DO_SKIP_MZN_CHECK unset
        run: |
          python -c "
          try:
              import discrete_optimization
          except RuntimeError:
              pass
          else:
              raise AssertionError('We should not be able to import discrete_optimization without minizinc being installed.')
          "
      - name: Create bin/
        run: mkdir -p bin
      - name: Minizinc prerequisites
        run: |
          ${{ matrix.minizinc_prerequisites_cmdline }}
      - name: get MiniZinc path to cache
        id: get-mzn-cache-path
        run: |
          echo "path=${{ matrix.minizinc_cache_path }}" >> $GITHUB_OUTPUT  # expands variables
      - name: Restore MiniZinc cache
        id: cache-minizinc
        uses: actions/cache@v4
        with:
          path: ${{ steps.get-mzn-cache-path.outputs.path }}
          key: ${{ matrix.minizinc_url }}
      - name: Download MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          curl -o "${{ matrix.minizinc_downloaded_filepath }}" -L ${{ matrix.minizinc_url }}
      - name: Install MiniZinc
        if: steps.cache-minizinc.outputs.cache-hit != 'true'
        run: |
          ${{ matrix.minizinc_install_cmdline }}
      - name: Test minizinc install
        run: |
          ${{ matrix.minizinc_config_cmdline }}
          minizinc --version
      - name: Check imports are working
        run: |
          # configure minizinc
          ${{ matrix.minizinc_config_cmdline }}
          # check imports
          python tests/test_import_all_submodules.py
      - name: Install test dependencies
        run: |
          wheelfile=$(ls ./dist/discrete_optimization*.whl)
          pip install ${wheelfile}[test]
          if [ "${{ matrix.wo_gurobi }}" != "without gurobi" ]; then
            echo "install gurobi"
            pip install gurobipy
            python -c "import gurobipy"
          fi
      - name: Restore tests data cache
        id: cache-data
        uses: actions/cache@v4
        with:
          path: ~/discrete_optimization_data
          key: data-${{ hashFiles('discrete_optimization/datasets.py') }}
      - name: Fetch data for tests
        if: steps.cache-data.outputs.cache-hit != 'true'
        run: |
          ${{ matrix.minizinc_config_cmdline }}
          python -m discrete_optimization.datasets
      - name: Test with pytest (no coverage)
        if: ${{ !matrix.coverage }}
        run: |
          # configure minizinc
          ${{ matrix.minizinc_config_cmdline }}
          # show library path used
          pytest -s tests/show_do_path.py
          # run test suite
          MPLBACKEND="agg" NUMBA_BOUNDSCHECK=1 pytest \
            -v --durations=0 --durations-min=10 \
            tests
      - name: Test with pytest (with coverage)
        if: ${{ matrix.coverage }}
        run: |
          # configure minizinc
          ${{ matrix.minizinc_config_cmdline }}
          # create a tmp directory from which to run the tests
          # so that "--cov discrete_optimization" looks for the package installed via the wheel
          # instead of the source directory in the repository (which would lead to a coverage of 0%)
          mkdir -p tmp && cd tmp
          # show library path used
          pytest -s ../tests/show_do_path.py
          # run test suite
          MPLBACKEND="agg" NUMBA_BOUNDSCHECK=1 pytest \
            --cov discrete_optimization \
            --cov-report xml:coverage.xml \
            --cov-report html:coverage_html \
            --cov-report term \
            -v --durations=0 --durations-min=10 \
            ../tests
          cd ..
      - name: Upload coverage report as artifact
        if: ${{ matrix.coverage }}
        uses: actions/upload-artifact@v4
        with:
          name: coverage
          path: |
            tmp/coverage.xml
            tmp/coverage_html

  update-notebooks-for-colab-and-binder:
    runs-on: ubuntu-latest
    needs: [trigger, build]
    outputs:
      notebooks-branch: ${{ steps.write-output.outputs.notebooks_branch }}
      binder-full-ref: ${{ steps.write-output.outputs.binder_full_ref }}
    steps:
      - name: Set default notebooks branch and binder env full ref
        run: |
          # default value: current branch
          echo 'notebooks_branch="${{ github.ref_name }}"' >> $GITHUB_ENV
          echo 'binder_full_ref="${{ github.repository }}/${{ github.ref_name }}"' >> $GITHUB_ENV
      - uses: actions/checkout@v4
        if: needs.trigger.outputs.is_release == 'true'
      - name: replace d-o version to install in colab notebooks
        if: needs.trigger.outputs.is_release == 'true'
        run: |
          version=${{ needs.build.outputs.do_version }}
          old_pip_spec_pattern="\(pip.*install.*\)git+https.*egg=discrete-optimization"
          new_pip_spec_pattern="\1discrete-optimization==$version"
          if ${{ github.repository != env.MAIN_REPO_NAME && secrets.TEST_PYPI_API_TOKEN != '' }} == 'true'; then
            # install from TestPypi if on a fork
            new_pip_spec_pattern="${new_pip_spec_pattern} --extra-index-url https://test.pypi.org/simple/"
          fi
          shopt -s globstar  # enable **
          sed -i -e "s|${old_pip_spec_pattern}|${new_pip_spec_pattern}|" notebooks/**/*.ipynb
      - name: replace d-o version to install in binder environment
        if: needs.trigger.outputs.is_release == 'true'
        run: |
          version=${{ needs.build.outputs.do_version }}
          linefilter="/^name/!"
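          # sed address /^name/! applies the substitution to every line except the "name:" line of environment.yml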
old_pip_spec_pattern="\(\s*\)-.*discrete-optimization.*$" | |
new_pip_spec_pattern="\1- discrete-optimization==$version" | |
if ${{ github.repository != env.MAIN_REPO_NAME && secrets.TEST_PYPI_API_TOKEN != '' }} == 'true'; then | |
# install from TestPypi if on a fork | |
new_pip_spec_pattern="${new_pip_spec_pattern}\n\1- --extra-index-url https://test.pypi.org/simple/" | |
fi | |
sed_command="${linefilter}s|${old_pip_spec_pattern}|${new_pip_spec_pattern}|" | |
echo sed -i -e ${sed_command} binder/environment.yml | |
sed -i -e "${sed_command}" binder/environment.yml | |
      - name: push modifications on a dedicated tag
        if: needs.trigger.outputs.is_release == 'true'
        id: push-tuto-release-tag
        run: |
          current_tag_name=${GITHUB_REF/refs\/tags\//}  # stripping refs/tags/
          new_tag_name="notebooks-${current_tag_name}"
          echo ${new_tag_name}
          git config user.name "Actions"
          git config user.email "actions@github.com"
          git commit binder/environment.yml notebooks -m "Install appropriate version of discrete-optimization"
          git tag ${new_tag_name} -m "Use release ${current_tag_name} in binder and colab"
          git push origin ${new_tag_name}
          # store new tag name as notebooks branch
          echo "notebooks_branch=${new_tag_name}" >> $GITHUB_ENV
          echo "binder_full_ref=${{ github.repository }}/${new_tag_name}" >> $GITHUB_ENV
      - name: write new notebooks branch in job outputs
        id: write-output
        run: |
          echo "notebooks_branch=${notebooks_branch}" >> $GITHUB_OUTPUT
          echo "binder_full_ref=${binder_full_ref}" >> $GITHUB_OUTPUT

  build-doc:
    needs: [update-notebooks-for-colab-and-binder]
    uses: ./.github/workflows/build-doc.yml
    with:
      notebooks-branch: ${{ needs.update-notebooks-for-colab-and-binder.outputs.notebooks-branch }}
      doc-prerequisites-cmdline: export DO_SKIP_MZN_CHECK=1

  deploy:
    # for release tags
    runs-on: ubuntu-latest
    needs: [trigger, test]
    if: needs.trigger.outputs.is_release == 'true'
    steps:
      - name: Download wheels artifact
        uses: actions/download-artifact@v4
        with:
          name: dist
          path: dist
      - name: Create the github release
        uses: ncipollo/release-action@v1
        with:
          artifacts: dist
          generateReleaseNotes: true
      - name: Publish package to TestPyPI (only for forks)
        env:
          TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}
        if: github.repository != env.MAIN_REPO_NAME && env.TEST_PYPI_API_TOKEN != ''
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          password: ${{ secrets.TEST_PYPI_API_TOKEN }}
          repository_url: https://test.pypi.org/legacy/
      - name: Publish package to PyPI (main repo)
        env:
          PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
        if: github.repository == env.MAIN_REPO_NAME && env.PYPI_API_TOKEN != ''
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          password: ${{ secrets.PYPI_API_TOKEN }}

  deploy-doc:
    needs: [trigger, update-notebooks-for-colab-and-binder, build-doc, test, deploy]
    # if: always()
    # -> trigger even if one needed job was skipped (namely deploy)
    # -> needed jobs' successes must be checked explicitly
    if: |
      always()
      && (needs.build-doc.result == 'success')
      && (
        (
          (needs.trigger.outputs.is_push_on_default_branch == 'true')
          && (needs.test.result == 'success')
        )
        || (
          (needs.trigger.outputs.is_release == 'true')
          && (needs.deploy.result == 'success')
        )
      )
    uses: ./.github/workflows/deploy-doc.yml
    with:
      doc-version: ${{ needs.build-doc.outputs.doc-version }}
      binder-env-fullref: ${{ needs.update-notebooks-for-colab-and-binder.outputs.binder-full-ref }}