# Source: GitHub Actions "Workflow file for this run" page (page chrome preserved below as comments)
# Skip to content
#
# Give conda-forge higher priority than nvidia in GPU environments #3713
#
# Give conda-forge higher priority than nvidia in GPU environments
#
# Give conda-forge higher priority than nvidia in GPU environments #3713
#
# Workflow file for this run
name: Test Python package
on:
push:
branches:
- main
pull_request:
# When this workflow is queued, automatically cancel any previous running
# or pending jobs from the same branch
concurrency:
group: test-${{ github.head_ref }}
cancel-in-progress: true
# Required shell entrypoint to have properly activated conda environments
defaults:
run:
shell: bash -l {0}
jobs:
detect-ci-trigger:
name: Check for upstream trigger phrase
runs-on: ubuntu-latest
if: github.repository == 'dask-contrib/dask-sql'
outputs:
triggered: ${{ steps.detect-trigger.outputs.trigger-found }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 2
- uses: xarray-contrib/ci-trigger@v1.2
id: detect-trigger
with:
keyword: "[test-upstream]"
test:
name: "Build & Test (${{ matrix.os }}, python: ${{ matrix.python }}, distributed: ${{ matrix.distributed }})"
needs: [detect-ci-trigger]
runs-on: ${{ matrix.os }}
env:
CONDA_FILE: continuous_integration/environment-${{ matrix.python }}-dev.yaml
DASK_SQL_DISTRIBUTED_TESTS: ${{ matrix.distributed }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python: ["3.8", "3.9", "3.10"]
distributed: [false]
include:
# run tests on a distributed client
- os: "ubuntu-latest"
python: "3.8"
distributed: true
- os: "ubuntu-latest"
python: "3.10"
distributed: true
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: conda-incubator/setup-miniconda@v2.2.0
with:
miniforge-variant: Mambaforge
use-mamba: true
python-version: ${{ matrix.python }}
channel-priority: strict
activate-environment: dask-sql
environment-file: ${{ env.CONDA_FILE }}
run-post: ${{ matrix.os != 'windows-latest' && 'true' || 'false' }}
- name: Cache Rust
uses: Swatinem/rust-cache@v2
with:
workspaces: dask_planner
shared-key: test
- name: Build the Rust DataFusion bindings
run: |
python setup.py build install
- name: Install hive testing dependencies
if: matrix.os == 'ubuntu-latest'
run: |
mamba install -c conda-forge "sasl>=0.3.1"
docker pull bde2020/hive:2.3.2-postgresql-metastore
docker pull bde2020/hive-metastore-postgresql:2.3.0
- name: Optionally install upstream dev Dask
if: needs.detect-ci-trigger.outputs.triggered == 'true'
run: |
mamba install --no-channel-priority dask/label/dev::dask
- name: Test with pytest
run: |
pytest --junitxml=junit/test-results.xml --cov-report=xml -n auto tests --dist loadfile
- name: Upload pytest test results
if: always()
uses: actions/upload-artifact@v3
with:
name: pytest-results
path: junit/test-results.xml
- name: Upload coverage to Codecov
if: github.repository == 'dask-contrib/dask-sql'
uses: codecov/codecov-action@v3
import:
name: "Test importing with bare requirements"
needs: [detect-ci-trigger]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: conda-incubator/setup-miniconda@v2.2.0
with:
miniforge-variant: Mambaforge
use-mamba: true
python-version: "3.8"
channel-priority: strict
- name: Install Protoc
uses: arduino/setup-protoc@v1
with:
version: '3.x'
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install dependencies and nothing else
run: |
mamba install "setuptools-rust>=1.5.2"
pip install -e . -vv
which python
pip list
mamba list
- name: Optionally install upstream dev Dask
if: needs.detect-ci-trigger.outputs.triggered == 'true'
run: |
python -m pip install git+https://github.com/dask/dask
python -m pip install git+https://github.com/dask/distributed
- name: Try to import dask-sql
run: |
python -c "import dask_sql; print('ok')"