From 7d1a02cac553d60280d23163824b471b06200114 Mon Sep 17 00:00:00 2001
From: Charles Blackmon-Luca <20627856+charlesbluca@users.noreply.github.com>
Date: Mon, 20 Dec 2021 11:38:08 -0500
Subject: [PATCH] Add workflow / recipe to generate Dask nightlies (#8469)

As part of addressing https://github.com/dask/community/issues/76, this PR
adds:

- A conda recipe in `continuous_integration` to build a nightly `dask-core`
  package
- A GHA workflow that builds this nightly as a check for PRs, and uploads the
  package to the Dask channel under the `dev` label on pushes to `main`
---
 .github/workflows/conda.yml             | 59 +++++++++++++++++++++++++
 continuous_integration/recipe/meta.yaml | 47 ++++++++++++++++++++
 2 files changed, 106 insertions(+)
 create mode 100644 .github/workflows/conda.yml
 create mode 100644 continuous_integration/recipe/meta.yaml

diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml
new file mode 100644
index 00000000000..b10ec6cba46
--- /dev/null
+++ b/.github/workflows/conda.yml
@@ -0,0 +1,59 @@
+name: Conda build
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+
+# When this workflow is queued, automatically cancel any previous running
+# or pending jobs from the same branch
+concurrency:
+  group: conda-${{ github.head_ref }}
+  cancel-in-progress: true
+
+# Required shell entrypoint to have properly activated conda environments
+defaults:
+  run:
+    shell: bash -l {0}
+
+jobs:
+  conda:
+    name: Build (and upload)
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+      - name: Set up Python
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          miniforge-variant: Mambaforge
+          use-mamba: true
+          python-version: 3.8
+      - name: Install dependencies
+        run: |
+          mamba install boa conda-verify
+
+          which python
+          pip list
+          mamba list
+      - name: Build conda package
+        run: |
+          # suffix for nightly package versions
+          export VERSION_SUFFIX=a`date +%y%m%d`
+
+          conda mambabuild continuous_integration/recipe \
+                           --no-anaconda-upload \
+                           --output-folder .
+      - name: Upload conda package
+        if: |
+          github.event_name == 'push'
+          && github.ref == 'refs/heads/main'
+          && github.repository == 'dask/dask'
+        env:
+          ANACONDA_API_TOKEN: ${{ secrets.DASK_CONDA_TOKEN }}
+        run: |
+          # install anaconda for upload
+          mamba install anaconda-client
+
+          anaconda upload --label dev noarch/*.tar.bz2
diff --git a/continuous_integration/recipe/meta.yaml b/continuous_integration/recipe/meta.yaml
new file mode 100644
index 00000000000..66b0a0c2151
--- /dev/null
+++ b/continuous_integration/recipe/meta.yaml
@@ -0,0 +1,47 @@
+{% set version = environ.get('GIT_DESCRIBE_TAG', '0.0.0.dev') + environ.get('VERSION_SUFFIX', '') %}
+{% set py_version = environ.get('CONDA_PY', 36) %}
+
+
+package:
+  name: dask-core
+  version: {{ version }}
+
+source:
+  git_url: ../..
+
+build:
+  number: {{ GIT_DESCRIBE_NUMBER }}
+  noarch: python
+  string: py{{ py_version }}_{{ GIT_DESCRIBE_HASH }}_{{ GIT_DESCRIBE_NUMBER }}
+  script: {{ PYTHON }} -m pip install . -vv
+
+requirements:
+  host:
+    - python >=3.7
+    - pip
+
+  run:
+    - python >=3.7
+    - cloudpickle >=1.1.1
+    - fsspec >=0.6.0
+    - packaging >=20.0
+    - partd >=0.3.10
+    - pyyaml
+    - toolz >=0.8.2
+
+test:
+  imports:
+    - dask
+  commands:
+    - pip check
+  requires:
+    - pip
+
+about:
+  home: https://github.com/dask/dask/
+  summary: Parallel PyData with Task Scheduling
+  license: BSD-3-Clause
+  license_file: LICENSE.txt
+  description: Parallel Python with task scheduling
+  doc_url: https://dask.org/
+  dev_url: https://github.com/dask/dask
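
For reference, once this workflow has uploaded a nightly build under the `dev`
label, it could presumably be installed by pointing conda (or mamba) at that
label of the Dask channel. A minimal sketch follows; the channel name `dask`
is assumed from the PR description and upload step above rather than spelled
out in the patch itself:

    # pull the nightly dask-core package from the dev label
    # (channel name "dask" is assumed; label "dev" matches the upload step)
    conda install -c dask/label/dev dask-core

    # equivalently, with mamba
    mamba install -c dask/label/dev dask-core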