forked from NVIDIA/spark-rapids
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Jenkinsfile.release
92 lines (82 loc) · 3.18 KB
/
Jenkinsfile.release
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
#!/usr/bin/env groovy
/*
 * Copyright (c) 2019-2020, NVIDIA CORPORATION.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 *
 * Jenkinsfile for building and deploy rapids-plugin to public repo
 *
 */
@Library('shared-libs') _

// Internal Artifactory mirror used as the Maven repository for builds.
// ArtifactoryConstants is provided by the 'shared-libs' Jenkins shared library.
def urmUrl = "https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"

pipeline {
    // Run on a node that can either deploy via docker or has GPU-enabled docker.
    agent { label 'docker-deploy||docker-gpu' }

    options {
        ansiColor('xterm')
        // Hard stop after 3 hours so a hung build/deploy cannot tie up the agent.
        timeout(time: 180, unit: 'MINUTES')
        buildDiscarder(logRotator(numToKeepStr: '10'))
    }

    parameters {
        string(name: 'DEPLOY_TO', defaultValue: 'https://oss.sonatype.org/service/local/staging/deploy/maven2',
            description: 'The repo URL where to deploy the artifacts')
        string(name: 'REF', defaultValue: 'main', description: 'Commit to build')
    }

    environment {
        JENKINS_ROOT = 'jenkins'
        // Dev build image pulled from the internal Artifactory docker registry.
        IMAGE_NAME = "${ArtifactoryConstants.ARTIFACTORY_NAME}/sw-spark-docker/plugin:dev-ubuntu16-cuda10.1"
        // Writable location for the cuDF JIT kernel cache inside the container.
        LIBCUDF_KERNEL_CACHE_PATH = '/tmp/.cudf'
        // Route Apache artifact downloads through the internal mirror.
        MVN_MIRROR = '-s jenkins/settings.xml -P mirror-apache-to-urm'
        DIST_PL = 'dist'
        SQL_PL = 'sql-plugin'
        TESTS_PL = 'integration_tests'
        URM_URL = "${urmUrl}"
    }

    stages {
        stage('Build') {
            steps {
                script {
                    sh "docker pull $IMAGE_NAME"
                    sh "mkdir -p ${HOME}/.zinc"
                    // Build inside the CUDA-enabled image; mount the host's .m2 and
                    // .zinc caches so repeated builds reuse downloaded dependencies.
                    docker.image("$IMAGE_NAME").inside("--runtime=nvidia -v ${HOME}/.m2:${HOME}/.m2:rw \
                        -v ${HOME}/.zinc:${HOME}/.zinc:rw") {
                        // Purge the cached databricks shim first so the release build
                        // resolves it fresh, then build with release profiles.
                        sh "mvn dependency:purge-local-repository -DmanualInclude='com.nvidia:rapids-4-spark-shims-spark300-databricks_2.12' \
                            -U -B clean install $MVN_MIRROR -P 'include-databricks,source-javadoc,!snapshot-shims'"
                    }
                }
            }
        }
        stage("Deploy") {
            environment {
                // Signing and upload credentials, injected from the Jenkins store.
                SERVER_ID = 'ossrh'
                SERVER_URL = "${DEPLOY_TO}"
                GPG_PASSPHRASE = credentials('SPARK_RAPIDS_GPG_PASSPHRASE')
                GPG_FILE = credentials('SPARK_RAPIDS_GPG_PRIVATE_KEY')
                SONATYPE = credentials('SPARK_SONATYPE_USERPASS')
                // Keep the keyring inside the workspace so it is isolated per build.
                GNUPGHOME = "${WORKSPACE}/.gnupg"
            }
            steps {
                script {
                    docker.image("$IMAGE_NAME").inside("-v ${HOME}/.m2:${HOME}/.m2:rw") {
                        // Start from a clean keyring, then import the release key.
                        sh 'rm -rf $GNUPGHOME'
                        sh 'gpg --import $GPG_FILE'
                        // Retry the upload: transient network/Sonatype failures are common.
                        retry (3) {
                            sh "bash $JENKINS_ROOT/deploy.sh true false"
                        }
                    }
                }
            }
        }
    } // End of stages
}