Run python3 dev/update_pypi_package_index.py #942

Workflow file for this run

name: MLflow tests
on:
pull_request:
push:
branches:
- master
- branch-[0-9]+.[0-9]+
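# Cancel any in-progress run of this workflow for the same ref (e.g. when new commits
# are pushed to a pull request) so only the latest run consumes CI resources.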
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true
# Use `bash --noprofile --norc -exo pipefail` by default for all `run` steps in this workflow:
# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#defaultsrun
defaults:
run:
shell: bash --noprofile --norc -exo pipefail {0}
env:
# Note miniconda is pre-installed in the virtual environments for GitHub Actions:
# https://github.com/actions/virtual-environments/blob/main/images/linux/scripts/installers/miniconda.sh
MLFLOW_CONDA_HOME: /usr/share/miniconda
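# Bind Spark to localhost so tests don't depend on the runner's hostname resolution.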
SPARK_LOCAL_IP: localhost
jobs:
lint:
runs-on: ubuntu-latest
timeout-minutes: 45
if: github.event_name == 'pull_request' && github.event.pull_request.draft == false
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup-python
- uses: ./.github/actions/cache-pip
- name: Add problem matchers
run: |
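# Problem matchers turn linter output into inline annotations on the PR diff.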
echo "::add-matcher::.github/workflows/matchers/pylint.json"
echo "::add-matcher::.github/workflows/matchers/black.json"
echo "::add-matcher::.github/workflows/matchers/ruff.json"
- name: Install dependencies
run: |
source ./dev/install-common-deps.sh --ml
pip install -r requirements/lint-requirements.txt
- uses: ./.github/actions/pipdeptree
- name: Test custom pylint-plugins
run: |
pytest pylint_plugins/tests
- name: Install pre-commit hooks
run: |
pre-commit install -t pre-commit -t prepare-commit-msg
- name: Run pre-commit (only changed files)
id: run-pre-commit
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# pylint is ridiculously slow. It takes ~3 minutes to run on all files.
# To fail fast for faster iteration, first run pylint only on changed files,
# then run it on all files in the next step.
changed_files=$(python dev/list_changed_files.py --repository mlflow/mlflow --pr-num ${{ github.event.number }})
pre-commit run --files $changed_files
- name: Run pre-commit (all files)
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
changed_files=$(python dev/list_changed_files.py --repository mlflow/mlflow --pr-num ${{ github.event.number }})
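# Only run the slower all-files pass when the PR actually touches Python files.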
if echo "$changed_files" | grep -q '\.py$'; then
pre-commit run --all-files
fi
- name: Update error messages
if: always() && steps.run-pre-commit.outcome == 'failure'
run: |
if [ -f ".black.log" ]; then
grep -o 'reformatted .*\.py' .black.log | sed 's/reformatted \(.*\.py\)/\1: This file is unformatted. Run `black .` or comment `@mlflow-automation autoformat` on the PR if you'\''re an MLflow maintainer./'
fi
# The python-skinny tests cover the subset of mlflow functionality that is meant to be
# supported with a smaller dependency footprint, while also verifying that the omitted
# dependencies are not required.
python-skinny:
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: ./.github/actions/setup-python
- name: Install dependencies
run: |
source ./dev/install-common-deps.sh --skinny
- name: Run tests
run: |
./dev/run-python-skinny-tests.sh
- uses: ./.github/actions/untracked
python:
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-latest
timeout-minutes: 120
env:
JOHNSNOWLABS_LICENSE_JSON: ${{ secrets.JOHNSNOWLABS_LICENSE_JSON }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: ./.github/actions/setup-python
- uses: ./.github/actions/setup-pyenv
- uses: ./.github/actions/setup-java
with:
java-version: 11
distribution: "adopt"
- uses: ./.github/actions/cache-pip
- name: Install dependencies
run: |
source ./dev/install-common-deps.sh --ml
- uses: ./.github/actions/pipdeptree
- name: Import check
run: |
python tests/check_mlflow_lazily_imports_ml_packages.py
- name: Run tests
run: |
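# Set up local SSH access (presumably needed by tests that exercise SSH/SFTP-based artifact storage).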
source dev/setup-ssh.sh
./dev/run-python-tests.sh
- uses: ./.github/actions/untracked
database:
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-latest
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Build
run: |
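# Pull the database images and build the MLflow test images with the package's
# dependencies resolved from setup.py.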
./tests/db/compose.sh pull -q postgresql mysql mssql
./tests/db/compose.sh build --build-arg DEPENDENCIES="$(python setup.py -q dependencies)"
- name: Run tests
run: |
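# Disable exit-on-error so tests run against every database service; record any
# failure via the ERR trap and fail the step at the end.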
set +e
err=0
trap 'err=1' ERR
for service in $(./tests/db/compose.sh config --services | grep '^mlflow-')
do
# Set `--no-TTY` to show container logs on GitHub Actions:
# https://github.com/actions/virtual-environments/issues/5022
./tests/db/compose.sh run --rm --no-TTY $service pytest \
tests/store/tracking/test_sqlalchemy_store.py \
tests/store/model_registry/test_sqlalchemy_store.py \
tests/db
done
test $err = 0
- name: Run migration check
run: |
set +e
err=0
trap 'err=1' ERR
./tests/db/compose.sh down --volumes --remove-orphans
for service in $(./tests/db/compose.sh config --services | grep '^migration-')
do
./tests/db/compose.sh run --rm --no-TTY $service
done
test $err = 0
- name: Rebuild images with SQLAlchemy < 2.0
run: |
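# Pin SQLAlchemy below 2.0 to verify the SQLAlchemy stores still work with the 1.x series.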
sed -i 's/sqlalchemy.*/sqlalchemy<2.0/g' requirements/core-requirements.txt
git diff
./tests/db/compose.sh build --build-arg DEPENDENCIES="$(python setup.py -q dependencies)"
- name: Run tests
run: |
set +e
err=0
trap 'err=1' ERR
for service in $(./tests/db/compose.sh config --services | grep '^mlflow-')
do
# Set `--no-TTY` to show container logs on GitHub Actions:
./tests/db/compose.sh run --rm --no-TTY $service pytest \
tests/store/tracking/test_sqlalchemy_store.py \
tests/store/model_registry/test_sqlalchemy_store.py \
tests/db
done
test $err = 0
- name: Clean up
run: |
./tests/db/compose.sh down --volumes --remove-orphans --rmi all
- uses: ./.github/actions/untracked
java:
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: ./.github/actions/setup-python
- uses: ./.github/actions/setup-java
- name: Install dependencies
run: |
source ./dev/install-common-deps.sh
- uses: ./.github/actions/pipdeptree
- name: Run tests
run: |
cd mlflow/java
mvn clean package -q
flavors:
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: ./.github/actions/setup-python
- uses: ./.github/actions/setup-pyenv
- uses: ./.github/actions/setup-java
- uses: ./.github/actions/cache-pip
- name: Install dependencies
run: |
source ./dev/install-common-deps.sh --ml
- uses: ./.github/actions/pipdeptree
- name: Run tests
run: |
./dev/run-python-flavor-tests.sh;
# It takes 9-10 minutes to run the tests in `tests/models`. To make CI finish faster,
# run these tests in a separate job.
models:
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: ./.github/actions/setup-python
- uses: ./.github/actions/setup-pyenv
- uses: ./.github/actions/setup-java
- name: Install dependencies
run: |
source ./dev/install-common-deps.sh
pip install pyspark
- uses: ./.github/actions/pipdeptree
- name: Run tests
run: |
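# Point MLFLOW_HOME at the repository checkout so tests that re-install MLflow
# (e.g. when building model environments) use the local source tree.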
export MLFLOW_HOME=$(pwd)
pytest tests/models
- uses: ./.github/actions/untracked
# NOTE: numpy is pinned in this suite because it relies heavily on shap, which internally
# references numpy aliases (e.g., np.bool) that were removed in numpy 1.24.
# When the shap cross version tests are passing in a new release version of shap, this pin should
# be removed.
evaluate:
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-latest
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: ./.github/actions/setup-python
- uses: ./.github/actions/setup-pyenv
- uses: ./.github/actions/setup-java
- name: Install dependencies
run: |
source ./dev/install-common-deps.sh
pip install pyspark "numpy<1.24.0"
- uses: ./.github/actions/pipdeptree
- name: Run tests
run: |
export MLFLOW_HOME=$(pwd)
pytest tests/evaluate
- uses: ./.github/actions/untracked
pyfunc:
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: ./.github/actions/setup-python
- uses: ./.github/actions/setup-pyenv
- uses: ./.github/actions/setup-java
- name: Install dependencies
run: |
source ./dev/install-common-deps.sh
pip install tensorflow pyspark
- uses: ./.github/actions/pipdeptree
- name: Run tests
run: |
export MLFLOW_HOME=$(pwd)
pytest --durations=30 tests/pyfunc
- uses: ./.github/actions/untracked
sagemaker:
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: ./.github/actions/setup-python
- uses: ./.github/actions/setup-java
- name: Install dependencies
run: |
source ./dev/install-common-deps.sh --ml
- uses: ./.github/actions/pipdeptree
- name: Run tests
run: |
./dev/run-python-sagemaker-tests.sh;
- uses: ./.github/actions/untracked
windows:
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
runs-on: windows-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: ./.github/actions/setup-python
- uses: ./.github/actions/setup-pyenv
- uses: ./.github/actions/setup-java
- name: Install python dependencies
run: |
pip install -r requirements/test-requirements.txt
pip install --no-dependencies tests/resources/mlflow-test-plugin
pip install -e .[extras]
pip install pyspark
pip install mleap
# Install Hugging Face datasets to test Hugging Face usage with MLflow dataset tracking
pip install datasets
# Install TensorFlow to test TensorFlow dataset usage with MLflow dataset tracking
pip install tensorflow
- uses: ./.github/actions/pipdeptree
- name: Download Hadoop winutils for Spark
run: |
git clone https://github.com/cdarlint/winutils /tmp/winutils
- name: Run python tests
env:
# Starting from SQLAlchemy version 2.0, `QueuePool` is the default connection pool
# when creating an `Engine`. `QueuePool` prevents the removal of temporary database
# files created during tests on Windows as it keeps the DB connection open until
# it's explicitly disposed.
MLFLOW_SQLALCHEMYSTORE_POOLCLASS: "NullPool"
run: |
# Set Hadoop environment variables required for testing Spark integrations on Windows
export HADOOP_HOME=/tmp/winutils/hadoop-3.2.2
export PATH=$PATH:$HADOOP_HOME/bin
# Run Windows tests
pytest --ignore-flavors --ignore=tests/projects --ignore=tests/examples tests --ignore=tests/recipes --ignore=tests/evaluate
# MLeap is incompatible with the PySpark 3.4 release on Windows.
# Reinstate tests when MLeap has released a fix. [ML-30491]
# pytest tests/mleap
- uses: ./.github/actions/untracked