-
Notifications
You must be signed in to change notification settings - Fork 142
99 lines (77 loc) · 3.29 KB
/
dev-deploy.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
---
# dev-deploy: builds Sparkler and pushes the artifacts to the dev Databricks
# workspace on every push to master. Two independent jobs:
#   - standalone: fat package with Spark bundled (sparkprovided=false), zipped
#   - submit:     jar for spark-submit on a cluster (sparkprovided=true),
#                 plus the plugin jars
name: dev-deploy
on:
  push:
    branches: [ master ]
env:
  PKG_NAME: sparkler-app
  # Placeholder — overwritten at runtime from version.sbt by the
  # "Set package version" step in each job.
  PKG_VERSION: N/A
  PKG_PATH: sparkler-core
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  # DATABRICKS_HOST / DATABRICKS_TOKEN are read implicitly by the Databricks CLI.
  DATABRICKS_HOST: https://dbc-abaef56e-ca8a.cloud.databricks.com
  DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_DEV_TOKEN }}
  DATABRICKS_RELEASE_PATH: dbfs:/FileStore/release
jobs:
  standalone:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          java-version: '8'
          distribution: adopt
      - name: Install Databricks CLI
        run: pip install databricks-cli
      - name: Create the release folder
        # mkdirs is idempotent; safe if the folder already exists.
        run: databricks fs mkdirs ${{ env.DATABRICKS_RELEASE_PATH }}
      - name: Set package version
        # Pulls the quoted version string out of version.sbt into PKG_VERSION
        # for all subsequent steps.
        run: echo "PKG_VERSION=$(grep version version.sbt | cut -d'"' -f2)" >> $GITHUB_ENV
        working-directory: ${{ env.PKG_PATH }}
      - name: Build "Standalone" package
        run: sbt package assembly -Dsparkprovided=false -Dmaven.javadoc.skip=true
        working-directory: ${{ env.PKG_PATH }}
      - name: Remove library jars
        # Drop the unpacked library directory so only the assembled artifacts
        # end up in the zip.
        run: rm -r build/${{ env.PKG_NAME }}-${{ env.PKG_VERSION }}
        working-directory: ${{ env.PKG_PATH }}
      - name: Zip the Sparkler build
        run: zip -r ${{ env.PKG_NAME }}-${{ env.PKG_VERSION }}.zip *
        working-directory: ${{ env.PKG_PATH }}/build
      - name: Deploy "Standalone" to Databricks
        run: databricks fs cp ${{ env.SRC_ZIP }} ${{ env.DEST_ZIP }}
        working-directory: ${{ env.PKG_PATH }}/build
        env:
          SRC_ZIP: ${{ env.PKG_NAME }}-${{ env.PKG_VERSION }}.zip
          # Use the github.sha expression context rather than a literal
          # $GITHUB_SHA in the env value: the latter only worked because the
          # value happened to be re-expanded unquoted by the shell.
          DEST_ZIP: ${{ env.DATABRICKS_RELEASE_PATH }}/${{ env.PKG_NAME }}-${{ env.PKG_VERSION }}-${{ github.sha }}.zip
  submit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up JDK 8
        uses: actions/setup-java@v2
        with:
          java-version: '8'
          distribution: adopt
      - name: Install Databricks CLI
        run: pip install databricks-cli
      - name: Create the release folder
        run: databricks fs mkdirs ${{ env.DATABRICKS_RELEASE_PATH }}
      - name: Set package version
        # Same extraction as the standalone job; jobs run on separate runners
        # so the env var must be set in each.
        run: echo "PKG_VERSION=$(grep version version.sbt | cut -d'"' -f2)" >> $GITHUB_ENV
        working-directory: ${{ env.PKG_PATH }}
      - name: Create the plugins folder
        run: databricks fs mkdirs ${{ env.DATABRICKS_RELEASE_PATH }}/plugins/plugins-${{ env.PKG_VERSION }}
      - name: Build "Submit" package
        # sparkprovided=true: Spark classes are supplied by the target cluster.
        run: sbt clean package assembly -Dsparkprovided=true -Dmaven.javadoc.skip=true
        working-directory: ${{ env.PKG_PATH }}
      - name: Deploy "Submit" to Databricks
        run: databricks fs cp ${{ env.SRC_JAR }} ${{ env.DEST_JAR }}
        working-directory: ${{ env.PKG_PATH }}
        env:
          SRC_JAR: build/${{ env.PKG_NAME }}-${{ env.PKG_VERSION }}.jar
          # Expression context instead of shell-expanded $GITHUB_SHA (see the
          # standalone job's deploy step).
          DEST_JAR: ${{ env.DATABRICKS_RELEASE_PATH }}/${{ env.PKG_NAME }}-${{ env.PKG_VERSION }}-${{ github.sha }}.jar
      - name: Deploy plugins to Databricks
        run: databricks fs cp --overwrite --recursive ${{ env.SRC_JARS }} ${{ env.DEST_DIR }}
        working-directory: ${{ env.PKG_PATH }}
        env:
          SRC_JARS: build/plugins/
          DEST_DIR: ${{ env.DATABRICKS_RELEASE_PATH }}/plugins/plugins-${{ env.PKG_VERSION }}/