forked from anovos/anovos
87 lines (77 loc) · 2.11 KB
/
demo.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
---
# Demo workflow: runs the Anovos demo end-to-end across a matrix of
# OS / Python / Spark / Hadoop / Java combinations.
name: Demo

# NOTE: generic YAML 1.1 parsers read the `on` key as boolean `true`;
# GitHub's own loader handles it correctly, so no quoting is needed here.
on:
  workflow_dispatch:
  workflow_call:
  push:
    # Only trigger when the workflow itself or the demo config changes.
    paths:
      - '.github/workflows/demo.yml'
      - 'config/configs_basic.yaml'
    branches-ignore:
      - main

jobs:
  run-demo:
    runs-on: ${{ matrix.os }}
    # Experimental matrix entries may fail without failing the whole run.
    continue-on-error: ${{ matrix.experimental }}
    strategy:
      fail-fast: false
      # Run matrix entries one at a time.
      max-parallel: 1
      matrix:
        include:
          # Legacy stack (Spark 2.x on macOS) — marked experimental.
          - os: 'macos-latest'
            python: '3.7'
            spark: '2.4.8'
            hadoop: '2.7'
            java: '8'
            experimental: true
          - os: 'ubuntu-latest'
            python: '3.9'
            spark: '3.1.3'
            hadoop: '3.2'
            java: '11'
            experimental: false
          - os: 'ubuntu-latest'
            python: '3.9'
            spark: '3.2.1'
            hadoop: '3.2'
            java: '11'
            experimental: false
    steps:
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python }}
      - uses: actions/setup-java@v3
        with:
          distribution: 'zulu'
          java-version: ${{ matrix.java }}
      - uses: vemonet/setup-spark@v1
        with:
          spark-version: ${{ matrix.spark }}
          hadoop-version: ${{ matrix.hadoop }}
      - name: Check Spark setup
        run: |
          echo $SPARK_HOME
          ls $SPARK_HOME
          which spark-submit
          which spark-shell
      - name: Checkout code
        # Pinned to a release tag instead of the mutable `master` branch,
        # matching the pinning style of the other actions in this workflow.
        uses: actions/checkout@v3
      - name: Ensure clean environment
        run: |
          make clean build
      - name: Load PIP cache
        uses: actions/cache@v3.0.4
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ matrix.python }}-${{ hashFiles('./requirements.txt') }}
          # Fall back to the most recent cache for this OS/Python combination.
          restore-keys: |
            ${{ runner.os }}-pip-${{ matrix.python }}-
            ${{ runner.os }}-pip-
      - name: Install Python requirements
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Run the demo
        run: |
          cp config/configs_basic.yaml dist/configs.yaml
          cd dist/
          ./spark-submit.sh