name: Build conda nightly
on:
  push:
    branches:
      - main
  pull_request:
    paths:
      - Cargo.toml
      - Cargo.lock
      - pyproject.toml
      - continuous_integration/recipe/**
      - .github/workflows/conda.yml
  schedule:
    - cron: '0 0 * * 0'

# When this workflow is queued, automatically cancel any previous running
# or pending jobs from the same branch
concurrency:
  group: conda-${{ github.head_ref }}
  cancel-in-progress: true

# Required shell entrypoint to have properly activated conda environments
defaults:
  run:
    shell: bash -l {0}

jobs:
  conda:
    name: "Build conda nightlies (python: ${{ matrix.python }}, arch: ${{ matrix.arch }})"
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python: ["3.9", "3.10", "3.11", "3.12"]
        arch: ["linux-64", "linux-aarch64"]
    steps:
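      # Remove large preinstalled toolchains, browsers, and Docker images to free
      # disk space on the hosted runner; only run for the linux-aarch64 build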
      - name: Manage disk space
        if: matrix.arch == 'linux-aarch64'
        run: |
          sudo mkdir -p /opt/empty_dir || true
          for d in \
            /opt/ghc \
            /opt/hostedtoolcache \
            /usr/lib/jvm \
            /usr/local/.ghcup \
            /usr/local/lib/android \
            /usr/local/share/powershell \
            /usr/share/dotnet \
            /usr/share/swift \
          ; do
            sudo rsync --stats -a --delete /opt/empty_dir/ $d || true
          done
          sudo apt-get purge -y -f firefox \
            google-chrome-stable \
            microsoft-edge-stable
          sudo apt-get autoremove -y >& /dev/null
          sudo apt-get autoclean -y >& /dev/null
          sudo docker image prune --all --force
          df -h
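      # Add 10 GiB of swap, likely to give the aarch64 build extra headroom against
      # out-of-memory failures; every command is best-effort (|| true)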
      - name: Create swapfile
        if: matrix.arch == 'linux-aarch64'
        run: |
          sudo fallocate -l 10GiB /swapfile || true
          sudo chmod 600 /swapfile || true
          sudo mkswap /swapfile || true
          sudo swapon /swapfile || true
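      # Check out the repository with full git history (fetch-depth: 0), not just the tip commit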
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
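      # The Python 3.9 here is only the build environment for conda-build/boa;
      # the package's target Python comes from the matrix and is passed to the build step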
      - name: Set up Python
        uses: conda-incubator/setup-miniconda@v2.3.0
        with:
          miniforge-variant: Mambaforge
          use-mamba: true
          python-version: "3.9"
          channel-priority: strict
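      # boa provides the `conda mambabuild` command used below; the remaining
      # commands just log the build environment for debugging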
      - name: Install dependencies
        run: |
          mamba install -c conda-forge "boa<0.17" "conda-build<24.1" conda-verify
          which python
          pip list
          mamba list
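      # Build the recipe for the matrix Python/arch with a date-based alpha version
      # suffix; testing and uploading are handled in separate steps below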
      - name: Build conda packages
        run: |
          # suffix for nightly package versions
          export VERSION_SUFFIX=a`date +%y%m%d`
          conda mambabuild continuous_integration/recipe \
            --python ${{ matrix.python }} \
            --variants "{target_platform: [${{ matrix.arch }}]}" \
            --error-overlinking \
            --no-test \
            --no-anaconda-upload \
            --output-folder packages
      - name: Test conda packages
        if: matrix.arch == 'linux-64' # can only test native platform packages
        run: |
          conda mambabuild --test packages/${{ matrix.arch }}/*.tar.bz2
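      # Attach the built packages to the workflow run so they can be downloaded
      # and installed locally for inspection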
      - name: Upload conda packages as artifacts
        uses: actions/upload-artifact@v3
        with:
          name: "conda nightlies (python - ${{ matrix.python }}, arch - ${{ matrix.arch }})"
          # need to install all conda channel metadata to properly install locally
          path: packages/
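      # Nightlies are only published from pushes to the upstream dask-contrib/dask-sql
      # repo, and are uploaded under the `dev` label on Anaconda.org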
      - name: Upload conda packages to Anaconda
        if: |
          github.event_name == 'push'
          && github.repository == 'dask-contrib/dask-sql'
        env:
          ANACONDA_API_TOKEN: ${{ secrets.DASK_CONDA_TOKEN }}
        run: |
          # install anaconda for upload
          mamba install -c conda-forge anaconda-client
          anaconda upload --label dev packages/${{ matrix.arch }}/*.tar.bz2