Separate dask job (#2376)
* Moved dask tests to separate dir.

* Moved other dask tests.

* Changed the Makefile to add dask-only test targets and removed the dask tests from all the other test targets.

* Added two Makefile targets that run unit tests on only the dask tests, with the number of pytest workers explicitly set to one (see the sketch after the commit message).

* Changed name of the dask YAML test workflows.

* Updated release notes.

* Forgot to change the names that matter.

* Added timeouts to the dask tests.

* Tried turning off broadcasting for scatter.

* Reduced timeout to 4 minutes for quicker testing.

* Changed name of the dask-only tests.

* Separated Windows unit tests that use dask.

* Reverted scatter's broadcast=False so it is True again.

* Set a 5-minute timeout.

* Turned fail-fast off.
chukarsten committed Jun 14, 2021
1 parent ca42b84 commit a096a7c
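
The commit message above touches on a few dask-specific choices: running the dask tests with a single worker, a 5-minute pytest timeout, and keeping scatter's broadcast=True. As a rough, hypothetical sketch (not EvalML's actual test code), a minimal pytest test combining those settings might look like the following, assuming pytest, pytest-timeout, numpy, and dask.distributed are installed; the fixture and test names are illustrative only:

import numpy as np
import pytest
from dask.distributed import Client, LocalCluster


@pytest.fixture
def one_worker_client():
    # Mirror the CI setup: a single dask worker so the test cannot oversubscribe the runner.
    cluster = LocalCluster(n_workers=1, threads_per_worker=1, processes=False)
    client = Client(cluster)
    yield client
    client.close()
    cluster.close()


@pytest.mark.timeout(300)  # matches the 5-minute (--timeout 300) limit in the new Makefile targets
def test_scatter_broadcast(one_worker_client):
    X = np.arange(1000).reshape(100, 10)
    # The commit experimented with broadcast=False and then reverted to broadcast=True,
    # which replicates the scattered data to every worker up front.
    future = one_worker_client.scatter(X, broadcast=True)
    total = one_worker_client.submit(lambda data: data.sum(), future).result()
    assert total == X.sum()

Note that the -n 1 in the new Makefile targets limits pytest-xdist to a single test worker; the one-worker dask cluster above is only an analogous illustration of that constraint.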
Showing 7 changed files with 179 additions and 5 deletions.
87 changes: 87 additions & 0 deletions .github/workflows/linux_unit_tests_with_dask.yml
@@ -0,0 +1,87 @@
name: Linux Unit Tests (Dask Only)

on:
pull_request:
types: [opened, synchronize]
push:
branches:
- main

jobs:
unit_tests:
name: ${{ matrix.python_version }} unit tests, Core Dependencies (${{ matrix.core_dependencies }}), Codecov (${{ matrix.codecov }})
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
include:
- python_version: "3.7"
core_dependencies: false
codecov: false
- python_version: "3.8"
core_dependencies: false
codecov: true
- python_version: "3.8"
core_dependencies: true
codecov: true
- python_version: "3.9"
core_dependencies: false
codecov: false
steps:
- name: Set up Python ${{ matrix.python_version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python_version }}
- name: Checkout repository
uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
fetch-depth: 2
- name: Update apt and install Graphviz
run: sudo apt update && sudo apt install -y graphviz
- if: ${{ matrix.core_dependencies }}
name: Installing Core Dependencies
run: |
pip install virtualenv
virtualenv test_python -q
source test_python/bin/activate
pip install --upgrade pip -q
pip install -e . --no-dependencies
pip install -r core-requirements.txt
pip install -r test-requirements.txt
# "!" negates return code. exit nonzero if any of these deps are found
! pip freeze | grep -E "xgboost|catboost|lightgbm|plotly|ipywidgets|category_encoders"
exit $?
- if: ${{ !matrix.core_dependencies }}
name: Installing Dependencies
run: |
pip install virtualenv
virtualenv test_python -q
source test_python/bin/activate
make installdeps
make installdeps-test
pip freeze
- name: Erase Coverage
run: |
source test_python/bin/activate
coverage erase
- if: ${{ matrix.core_dependencies }}
name: Run dask unit tests with minimal dependencies
run: |
source test_python/bin/activate
make git-test-minimal-deps-dask
- if: ${{ !matrix.core_dependencies }}
name: Run unit tests
run: |
source test_python/bin/activate
make git-test-dask
- if: ${{ matrix.codecov }}
name: install coverage
run: pip install coverage
- if: ${{ matrix.codecov }}
name: Upload coverage to Codecov
uses: codecov/codecov-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: true
74 changes: 74 additions & 0 deletions .github/workflows/windows_unit_tests_with_dask.yml
@@ -0,0 +1,74 @@
name: Windows Unit Tests (Dask Only)

on:
pull_request:
types: [opened, synchronize]
push:
branches:
- main

jobs:
win_unit_tests:
name: ${{ matrix.python_version }} windows unit tests
runs-on: windows-latest
strategy:
fail-fast: true
matrix:
python_version: [ "3.7", "3.8" ]
steps:
- name: Download Miniconda
shell: pwsh
run: |
$File = "Miniconda3-4.7.12.1-Windows-x86_64.exe"
$Uri = "https://repo.anaconda.com/miniconda/$File"
$ProgressPreference = "silentlyContinue"
Invoke-WebRequest -Uri $Uri -Outfile "$env:USERPROFILE/$File"
$hashFromFile = Get-FileHash "$env:USERPROFILE/$File" -Algorithm SHA256
$hashFromUrl = "f18060cc0bb50ae75e4d602b7ce35197c8e31e81288d069b758594f1bb46ab45"
if ($hashFromFile.Hash -ne "$hashFromUrl") {
Throw "File hashes did not match!"
}
- name: Install Miniconda
shell: cmd
run: start /wait "" %UserProfile%\Miniconda3-4.7.12.1-Windows-x86_64.exe /InstallationType=JustMe /RegisterPython=0 /AddToPath=1 /S /D=%UserProfile%\Miniconda3
- name: Set Python ${{ matrix.python_version }} Version
shell: pwsh
run: |
. $env:USERPROFILE\Miniconda3\shell\condabin\conda-hook.ps1
conda create -n curr_py python=${{ matrix.python_version }}
- name: Checkout repository
uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
- name: Install make
run: |
. $env:USERPROFILE\Miniconda3\shell\condabin\conda-hook.ps1
conda config --add channels conda-forge
conda activate curr_py
conda install make -q -y
- name: Install Graphviz
run: |
. $env:USERPROFILE\Miniconda3\shell\condabin\conda-hook.ps1
conda activate curr_py
conda install python-graphviz -q -y
- name: Install numba (for shap)
run: |
. $env:USERPROFILE\Miniconda3\shell\condabin\conda-hook.ps1
conda config --add channels conda-forge
conda activate curr_py
conda install numba -q -y
- name: Install EvalML with test requirements
shell: pwsh
run: |
. $env:USERPROFILE\Miniconda3\shell\condabin\conda-hook.ps1
conda activate curr_py
python -m pip install --upgrade pip
python -m pip install .
python -m pip install -r test-requirements.txt
pip freeze
- name: Run unit tests
run: |
. $env:USERPROFILE\Miniconda3\shell\condabin\conda-hook.ps1
conda activate curr_py
make git-test-dask
22 changes: 17 additions & 5 deletions Makefile
@@ -19,23 +19,35 @@ lint-fix:

.PHONY: test
test:
pytest evalml/ --doctest-modules --doctest-continue-on-failure
pytest evalml/ --doctest-modules --doctest-continue-on-failure --ignore=evalml/tests/automl_tests/dask_tests

.PHONY: test-dask
test-dask:
pytest evalml/tests/automl_tests/dask_tests/ --doctest-modules --doctest-continue-on-failure --timeout 300

.PHONY: git-test
git-test:
pytest evalml/ -n 2 --doctest-modules --cov=evalml --junitxml=test-reports/junit.xml --doctest-continue-on-failure --timeout 360
pytest evalml/ -n 2 --doctest-modules --cov=evalml --junitxml=test-reports/junit.xml --doctest-continue-on-failure --ignore=evalml/tests/automl_tests/dask_tests --timeout 300

.PHONY: git-test-dask
git-test-dask:
pytest evalml/tests/automl_tests/dask_tests/ -n 1 --doctest-modules --cov=evalml/tests/automl_tests/dask_tests/ --junitxml=test-reports/junit.xml --doctest-continue-on-failure --timeout 300

.PHONY: git-test-nocov
git-test-nocov:
pytest evalml/ -n 2 --doctest-modules --doctest-continue-on-failure --timeout 360
pytest evalml/ -n 2 --doctest-modules --doctest-continue-on-failure --ignore=evalml/tests/automl_tests/dask_tests --timeout 300

.PHONY: git-test-minimal-deps
git-test-minimal-deps:
pytest evalml/ -n 2 --doctest-modules --cov=evalml --junitxml=test-reports/junit.xml --doctest-continue-on-failure --has-minimal-dependencies --timeout 360
pytest evalml/ -n 2 --doctest-modules --cov=evalml --junitxml=test-reports/junit.xml --doctest-continue-on-failure --has-minimal-dependencies --ignore=evalml/tests/automl_tests/dask_tests --timeout 300

.PHONY: git-test-minimal-deps-dask
git-test-minimal-deps-dask:
pytest evalml/tests/automl_tests/dask_tests/ -n 1 --doctest-modules --cov=evalml/tests/automl_tests/dask_tests/ --junitxml=test-reports/junit.xml --doctest-continue-on-failure --has-minimal-dependencies

.PHONY: git-test-minimal-deps-nocov
git-test-minimal-deps-nocov:
pytest evalml/ -n 2 --doctest-modules --doctest-continue-on-failure --has-minimal-dependencies --timeout 360
pytest evalml/ -n 2 --doctest-modules --doctest-continue-on-failure --has-minimal-dependencies --ignore=evalml/tests/automl_tests/dask_tests --timeout 300

.PHONY: installdeps
installdeps:
1 change: 1 addition & 0 deletions docs/source/release_notes.rst
@@ -8,6 +8,7 @@ Release Notes
* Documentation Changes
* Testing Changes
* Add ``pytest-timeout``. All tests that run longer than 6 minutes will fail. :pr:`2374`
* Separated the dask tests into their own GitHub Actions jobs to isolate dask failures. :pr:`2376`
* Refactored dask tests :pr:`2377`

.. warning::
Empty file.
