Skip to content

Commit

Permalink
[SW-841] Remove withCustomCommitStates in pipelines as it's now duplicating GitHub (#711)
Browse files Browse the repository at this point in the history

(cherry picked from commit 421cc78)
  • Loading branch information
jakubhava committed May 16, 2018
1 parent 3971e53 commit 303f1cd
Show file tree
Hide file tree
Showing 3 changed files with 61 additions and 66 deletions.
58 changes: 28 additions & 30 deletions jenkins/Jenkinsfile-external
Expand Up @@ -45,35 +45,33 @@ if (env.CHANGE_BRANCH != null && env.CHANGE_BRANCH != ''){
cancelPreviousBuilds()
}

withCustomCommitStates(scm, 'h2o-ops-personal-auth-token', 'continuous-integration/jenkins/sw-external') {
node('dX-hadoop') {
// Clean workspace
sh 'rm -rf *'
// Get Sparkling Water and save the scm environment variables
checkout scm
// Get the script with the pipeline
def pipeline = load 'jenkins/sparklingWaterPipeline.groovy'
node('dX-hadoop') {
// Clean workspace
sh 'rm -rf *'
// Get Sparkling Water and save the scm environment variables
checkout scm
// Get the script with the pipeline
def pipeline = load 'jenkins/sparklingWaterPipeline.groovy'

// Execute the pipeline
pipeline(params) { p ->
sparkVersion = "${p.sparkVersion}"
runUnitTests = "${p.runUnitTests}"
runLocalIntegTests = "${p.runLocalIntegTests}"
runScriptTests = "${p.runScriptTests}"
runIntegTests = "${p.runIntegTests}"
runPySparklingIntegTests = "${p.runPySparklingIntegTests}"
sparklingTestEnv = "${p.sparklingTestEnv}"
buildAgainstH2OBranch = "${p.buildAgainstH2OBranch}"
h2oBranch = "${p.h2oBranch}"
buildAgainstSparkBranch = "${p.buildAgainstSparkBranch}"
sparkBranch = "${p.sparkBranch}"
hadoopVersion = "2.6"
backendMode = "external"
hdpVersion = "${p.hdpVersion}"
driverHadoopVersion = "${p.driverHadoopVersion}"
buildNightly = "false"
uploadNightly = "false"
// Execute the pipeline
pipeline(params) { p ->
sparkVersion = "${p.sparkVersion}"
runUnitTests = "${p.runUnitTests}"
runLocalIntegTests = "${p.runLocalIntegTests}"
runScriptTests = "${p.runScriptTests}"
runIntegTests = "${p.runIntegTests}"
runPySparklingIntegTests = "${p.runPySparklingIntegTests}"
sparklingTestEnv = "${p.sparklingTestEnv}"
buildAgainstH2OBranch = "${p.buildAgainstH2OBranch}"
h2oBranch = "${p.h2oBranch}"
buildAgainstSparkBranch = "${p.buildAgainstSparkBranch}"
sparkBranch = "${p.sparkBranch}"
hadoopVersion = "2.6"
backendMode = "external"
hdpVersion = "${p.hdpVersion}"
driverHadoopVersion = "${p.driverHadoopVersion}"
buildNightly = "false"
uploadNightly = "false"

}
}
}
}
}
51 changes: 24 additions & 27 deletions jenkins/Jenkinsfile-internal
Expand Up @@ -37,33 +37,30 @@ properties(
if (env.CHANGE_BRANCH != null && env.CHANGE_BRANCH != ''){
cancelPreviousBuilds()
}
node('dX-hadoop') {
// Clean workspace
sh 'rm -rf *'
// Get Sparkling Water and save the scm environment variables
checkout scm
// Get the script with the pipeline
def pipeline = load 'jenkins/sparklingWaterPipeline.groovy'

withCustomCommitStates(scm, 'h2o-ops-personal-auth-token', 'continuous-integration/jenkins/sw-internal') {
node('dX-hadoop') {
// Clean workspace
sh 'rm -rf *'
// Get Sparkling Water and save the scm environment variables
checkout scm
// Get the script with the pipeline
def pipeline = load 'jenkins/sparklingWaterPipeline.groovy'

// Execute the pipeline
pipeline(params) { p ->
sparkVersion = "${p.sparkVersion}"
runUnitTests = "${p.runUnitTests}"
runLocalIntegTests = "${p.runLocalIntegTests}"
runScriptTests = "${p.runScriptTests}"
runIntegTests = "${p.runIntegTests}"
runPySparklingIntegTests = "${p.runPySparklingIntegTests}"
sparklingTestEnv = "${p.sparklingTestEnv}"
buildAgainstH2OBranch = "${p.buildAgainstH2OBranch}"
h2oBranch = "${p.h2oBranch}"
buildAgainstSparkBranch = "${p.buildAgainstSparkBranch}"
sparkBranch = "${p.sparkBranch}"
hadoopVersion = "2.6"
backendMode = "internal"
buildNightly = "false"
uploadNightly = "false"
}
// Execute the pipeline
pipeline(params) { p ->
sparkVersion = "${p.sparkVersion}"
runUnitTests = "${p.runUnitTests}"
runLocalIntegTests = "${p.runLocalIntegTests}"
runScriptTests = "${p.runScriptTests}"
runIntegTests = "${p.runIntegTests}"
runPySparklingIntegTests = "${p.runPySparklingIntegTests}"
sparklingTestEnv = "${p.sparklingTestEnv}"
buildAgainstH2OBranch = "${p.buildAgainstH2OBranch}"
h2oBranch = "${p.h2oBranch}"
buildAgainstSparkBranch = "${p.buildAgainstSparkBranch}"
sparkBranch = "${p.sparkBranch}"
hadoopVersion = "2.6"
backendMode = "internal"
buildNightly = "false"
uploadNightly = "false"
}
}
18 changes: 9 additions & 9 deletions jenkins/sparklingWaterPipeline.groovy
Expand Up @@ -63,7 +63,7 @@ def getGradleCommand(config) {

def prepareSparkEnvironment() {
return { config ->
stage('Prepare Spark Environment') {
stage('Prepare Spark Environment- ' + config.backendMode) {

if (config.buildAgainstSparkBranch.toBoolean()) {
// build spark
Expand Down Expand Up @@ -104,7 +104,7 @@ def prepareSparkEnvironment() {

def prepareSparklingWaterEnvironment() {
return { config ->
stage('QA: Prepare Sparkling Water Environment') {
stage('QA: Prepare Sparkling Water Environment- ' + config.backendMode) {

// Warm up Gradle wrapper. When the gradle wrapper is downloaded for the first time, it prints message
// with release notes which can mess up the build
Expand Down Expand Up @@ -160,7 +160,7 @@ def prepareSparklingWaterEnvironment() {

def buildAndLint() {
return { config ->
stage('QA: Build and Lint') {
stage('QA: Build and Lint- ' + config.backendMode) {
withCredentials([usernamePassword(credentialsId: "LOCAL_NEXUS", usernameVariable: 'LOCAL_NEXUS_USERNAME', passwordVariable: 'LOCAL_NEXUS_PASSWORD')]) {
sh """
# Build
Expand All @@ -173,7 +173,7 @@ def buildAndLint() {

def unitTests() {
return { config ->
stage('QA: Unit Tests') {
stage('QA: Unit Tests- ' + config.backendMode) {
if (config.runUnitTests.toBoolean()) {
try {
withCredentials([string(credentialsId: "DRIVERLESS_AI_LICENSE_KEY", variable: "DRIVERLESS_AI_LICENSE_KEY")]) {
Expand All @@ -198,7 +198,7 @@ def unitTests() {

def localIntegTest() {
return { config ->
stage('QA: Local Integration Tests') {
stage('QA: Local Integration Tests- ' + config.backendMode) {

if (config.runLocalIntegTests.toBoolean()) {
try {
Expand All @@ -220,7 +220,7 @@ def localIntegTest() {

def scriptsTest() {
return { config ->
stage('QA: Script Tests') {
stage('QA: Script Tests- ' + config.backendMode) {
if (config.runScriptTests.toBoolean()) {
try {
sh """
Expand All @@ -240,7 +240,7 @@ def scriptsTest() {

def integTest() {
return { config ->
stage('QA: Integration Tests') {
stage('QA: Integration Tests- ' + config.backendMode) {
if (config.runIntegTests.toBoolean()) {
try {
sh """
Expand All @@ -259,7 +259,7 @@ def integTest() {

def pysparklingIntegTest() {
return { config ->
stage('QA: PySparkling Integration Tests') {
stage('QA: PySparkling Integration Tests- ' + config.backendMode) {
if (config.runPySparklingIntegTests.toBoolean()) {
try{
sh """
Expand All @@ -277,7 +277,7 @@ def pysparklingIntegTest() {

def publishNightly(){
return { config ->
stage ('Nightly: Publishing Artifacts to S3'){
stage ('Nightly: Publishing Artifacts to S3- ' + config.backendMode){
if (config.buildNightly.toBoolean() && config.uploadNightly.toBoolean()) {

withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', credentialsId: 'AWS S3 Credentials', accessKeyVariable: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
Expand Down

0 comments on commit 303f1cd

Please sign in to comment.