forked from NVIDIA/spark-rapids
/
Jenkinsfile.databricksnightly
113 lines (102 loc) · 4.05 KB
/
Jenkinsfile.databricksnightly
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
#!/usr/bin/env groovy
/*
* Copyright (c) 2020, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* Jenkinsfile for building rapids-plugin on Databricks
*
*/
@Library(['shared-libs', 'spark-jenkins-shared-lib']) _
// URM (Artifactory) Maven repository URL; exported to the pipeline via the
// URM_URL environment entry below.
def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"
// Nightly CI pipeline: package the plugin sources, run the Databricks
// build/test script on a remote cluster, deploy artifacts, then shut the
// cluster down and report the result to Slack.
pipeline {
// Run every stage inside a CUDA-enabled Docker container on a GPU-labeled node.
agent {
docker {
label 'docker-gpu'
image "${ArtifactoryConstants.ARTIFACTORY_NAME}/sw-spark-docker/plugin:dev-ubuntu16-cuda10.1"
// Mount the host's Maven (.m2) and Zinc caches read-write so builds reuse them.
args '--runtime=nvidia -v ${HOME}/.m2:${HOME}/.m2:rw \
-v ${HOME}/.zinc:${HOME}/.zinc:rw'
}
}
options {
ansiColor('xterm')
// Abort the whole run if it exceeds 3 hours.
timeout(time: 180, unit: 'MINUTES')
buildDiscarder(logRotator(numToKeepStr: '10'))
}
// Build-time knobs; defaults match the current nightly configuration.
parameters {
choice(name: 'DEPLOY_TO', choices: ['Urm', 'Local'],
description: 'Where to deploy artifacts to')
string(name: 'DATABRICKS_VERSION',
defaultValue: '0.3.0-SNAPSHOT', description: 'Version to set')
string(name: 'CUDF_VERSION',
defaultValue: '0.16-SNAPSHOT', description: 'Cudf version to use')
string(name: 'CUDA_VERSION',
defaultValue: 'cuda10-1', description: 'cuda version to use')
string(name: 'CLUSTER_ID',
defaultValue: '0909-141326-pawl52', description: 'databricks cluster id')
string(name: 'REF', defaultValue: 'branch-0.3', description: 'Commit to build')
}
environment {
JENKINS_ROOT = 'jenkins'
MVN_URM_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
LIBCUDF_KERNEL_CACHE_PATH='/tmp'
// Artifactory and Databricks secrets come from the Jenkins credential store.
URM_CREDS = credentials("svcngcc_artifactory")
DATABRICKS_TOKEN = credentials("SPARK_DATABRICKS_TOKEN")
SCALA_VERSION = '2.12'
SPARK_VERSION = '3.0.0-databricks'
CI_RAPIDS_JAR = 'rapids-4-spark_2.12-0.1-SNAPSHOT-ci.jar'
CI_CUDF_JAR = 'cudf-0.14-cuda10-1.jar'
URM_URL = "${urmUrl}"
}
// Nightly trigger: roughly 05:xx (hashed minute to spread load).
triggers {
cron('H 5 * * *')
}
stages {
stage('Ubuntu16 CUDA10.1') {
steps {
script {
// SSH agent is needed so run-tests.py can reach the Databricks driver node.
sshagent(credentials : ['svcngcc_pubpriv']) {
// Tarball the whole workspace and ship it to the cluster for build + test.
sh "rm -rf spark-rapids-ci.tgz"
sh "tar -zcvf spark-rapids-ci.tgz *"
sh "python3.6 ./jenkins/databricks/run-tests.py -c $CLUSTER_ID -z ./spark-rapids-ci.tgz -t $DATABRICKS_TOKEN -p /home/svcngcc/.ssh/id_rsa -l ./jenkins/databricks/build.sh -j $CI_RAPIDS_JAR -b $DATABRICKS_VERSION -k $SPARK_VERSION -a $SCALA_VERSION -f $CUDF_VERSION -u $CUDA_VERSION -m $CI_CUDF_JAR"
sh "./jenkins/databricks/deploy.sh"
}
}
}
}
} // end of stages
post {
always {
script {
// Best-effort cluster shutdown (|| true: never fail the build on cleanup),
// then notify Slack with the final build status.
sh "python3.6 ./jenkins/databricks/shutdown.py -c $CLUSTER_ID -t $DATABRICKS_TOKEN || true"
if (currentBuild.currentResult == "SUCCESS") {
slack("#swrapids-spark-cicd", "Success", color: "#33CC33")
} else {
slack("#swrapids-spark-cicd", "Failed", color: "#FF0000")
}
}
}
}
} // end of pipeline
/**
* Post a message to a Slack channel via the slackSend step.
* Caller-supplied entries in params (e.g. color:) override the defaults;
* the message is always prefixed with the Jenkins BUILD_URL.
*/
void slack(Map params = [:], String channel, String message) {
Map send = [
color: "#000000",
baseUrl: "${SparkConstants.SLACK_API_ENDPOINT}",
tokenCredentialId: "slack_token"
]
send.putAll(params)
send["channel"] = channel
send["message"] = "${BUILD_URL}\n" + message
slackSend(send)
}