# python.yml — GitHub Actions workflow (repository forked from apache/sedona).
# NOTE(review): the web-scrape residue that preceded this workflow (GitHub UI
# chrome and the rendered line-number gutter 1-76) was removed here; it was
# page layout, not part of the file's content.
---
# CI workflow: build Apache Sedona with Maven, then run the Python binding
# test suite against a matrix of Spark / Scala / Python versions using a
# locally unpacked Spark distribution.
name: Python build

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - '*'

jobs:
  build:
    # NOTE(review): ubuntu-18.04 runners have been retired by GitHub Actions;
    # migrate to a supported image (e.g. ubuntu-22.04) and re-verify the apt
    # package names below (python-dev in particular) before relying on this.
    runs-on: ubuntu-18.04
    strategy:
      matrix:
        # All version values are quoted so YAML never re-types them as
        # numbers (an unquoted 3.10 would parse as the float 3.1).
        include:
          - spark: '3.2.0'
            scala: '2.12.8'
            python: '3.9'
          - spark: '3.2.0'
            scala: '2.12.8'
            python: '3.8'
          - spark: '3.2.0'
            scala: '2.12.8'
            python: '3.7'
          - spark: '3.1.2'
            scala: '2.12.8'
            python: '3.7'
          - spark: '3.0.3'
            scala: '2.12.8'
            python: '3.7'
          - spark: '2.4.8'
            scala: '2.11.8'
            python: '3.7'
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-java@v1
        with:
          java-version: '8'
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python }}
      - name: Cache Maven packages
        uses: actions/cache@v2
        with:
          path: ~/.m2
          # Cache key tracks every pom.xml, so dependency changes refresh it.
          key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
          restore-keys: ${{ runner.os }}-m2
      # Checkout Git submodule if necessary
      - run: git submodule update --init --recursive
      # Build the JVM artifacts; Spark 3.x and 2.4 need different profiles.
      - env:
          SPARK_VERSION: ${{ matrix.spark }}
          SCALA_VERSION: ${{ matrix.scala }}
        run: if [ ${SPARK_VERSION:0:1} == "3" ]; then mvn -q clean install -DskipTests -Dscala=${SCALA_VERSION:0:4} -Dspark=3.0 -Dgeotools ; else mvn -q clean install -DskipTests -Dscala=${SCALA_VERSION:0:4} -Dspark=2.4 -Dgeotools ; fi
      # Download and unpack a matching Spark distribution for the tests.
      - env:
          SPARK_VERSION: ${{ matrix.spark }}
        run: wget https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop2.7.tgz
      - env:
          SPARK_VERSION: ${{ matrix.spark }}
        run: tar -xzf spark-${SPARK_VERSION}-bin-hadoop2.7.tgz
      # System packages and Python tooling needed by the test environment.
      - run: sudo apt-get -y install python3-pip python-dev libgeos-dev
      - run: sudo pip3 install -U setuptools
      - run: sudo pip3 install -U wheel
      - run: sudo pip3 install -U virtualenvwrapper
      - run: python3 -m pip install pipenv
      # Create the test virtualenv pinned to the matrix Python and PySpark.
      - env:
          SPARK_VERSION: ${{ matrix.spark }}
          PYTHON_VERSION: ${{ matrix.python }}
        run: (cd python;pipenv --python ${PYTHON_VERSION};pipenv install pyspark==${SPARK_VERSION};pipenv install --dev;pipenv graph)
      # Drop the freshly built Sedona jars into the Spark distribution.
      - env:
          SPARK_VERSION: ${{ matrix.spark }}
        run: find python-adapter/target -name sedona-* -exec cp {} spark-${SPARK_VERSION}-bin-hadoop2.7/jars/ \;
      # Run the Python test suite against the local Spark install.
      - env:
          SPARK_VERSION: ${{ matrix.spark }}
        run: (export SPARK_HOME=$PWD/spark-${SPARK_VERSION}-bin-hadoop2.7;export PYTHONPATH=$SPARK_HOME/python;cd python;pipenv run pytest tests)