Skip to content
Permalink
Browse files
Merge branch 'cassandra-3.0' into cassandra-3.11
  • Loading branch information
michaelsembwever committed May 28, 2022
2 parents 76648a6 + 2b533b2 commit bb0e63fc389f59b98713b762ecd49e9021b1c66f
Showing 1 changed file with 178 additions and 59 deletions.
@@ -28,65 +28,48 @@ pipeline {
stages {
stage('Init') {
  steps {
    // Start every run from a pristine workspace; one clean is sufficient
    // (the original invoked cleanWs() twice, which is redundant).
    cleanWs()
    script {
      // Seed the overall result so later stages can only downgrade it
      // (to UNSTABLE/FAILURE) based on downstream job outcomes.
      currentBuild.result='SUCCESS'
    }
  }
}
stage('Build') {
steps {
build job: "${env.JOB_NAME}-artifacts"
steps {
script {
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
build job: "${env.JOB_NAME}-artifacts"
}
}
}
}
stage('Test') {
parallel {
stage('stress') {
  steps {
    script {
      // Run the downstream stress-test job, retrying once with a linear
      // backoff so a transient agent/executor failure does not fail the
      // whole pipeline — consistent with the other test stages in this file.
      def attempt = 1
      retry(2) {
        if (attempt > 1) {
          sleep(60 * attempt)
        }
        attempt = attempt + 1
        // propagate: false — we inspect the result ourselves below.
        stress = build job: "${env.JOB_NAME}-stress-test", propagate: false
      }
      // Any non-success marks this build unstable; a hard downstream
      // failure escalates the overall build result to FAILURE.
      if (stress.result != 'SUCCESS') unstable('stress test failures')
      if (stress.result == 'FAILURE') currentBuild.result='FAILURE'
    }
  }
  post {
    always {
      // Pull the JUnit XML from the downstream run; tolerate missing files.
      warnError('missing test xml files') {
        script {
          copyTestResults('stress-test', stress.getNumber())
        }
      }
    }
  }
}
stage('jvm-dtest') {
steps {
script {
jvm_dtest = build job: "${env.JOB_NAME}-jvm-dtest", propagate: false
if (jvm_dtest.result != 'SUCCESS') unstable('jvm-dtest failures')
if (jvm_dtest.result == 'FAILURE') currentBuild.result='FAILURE'
}
}
post {
always {
warnError('missing test xml files') {
script {
copyTestResults('jvm-dtest', jvm_dtest.getNumber())
}
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
}
}
}
stage('jvm-dtest-upgrade') {
steps {
script {
jvm_dtest_upgrade = build job: "${env.JOB_NAME}-jvm-dtest-upgrade", propagate: false
if (jvm_dtest_upgrade.result != 'SUCCESS') unstable('jvm-dtest-upgrade failures')
if (jvm_dtest_upgrade.result == 'FAILURE') currentBuild.result='FAILURE'
attempt = attempt + 1
stress = build job: "${env.JOB_NAME}-stress-test", propagate: false
}
if (stress.result != 'SUCCESS') unstable('stress test failures')
if (stress.result == 'FAILURE') currentBuild.result='FAILURE'
}
}
post {
always {
warnError('missing test xml files') {
script {
copyTestResults('jvm-dtest-upgrade', jvm_dtest_upgrade.getNumber())
copyTestResults('stress-test', stress.getNumber())
}
}
}
@@ -95,7 +78,14 @@ pipeline {
stage('units') {
steps {
script {
test = build job: "${env.JOB_NAME}-test", propagate: false
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
test = build job: "${env.JOB_NAME}-test", propagate: false
}
if (test.result != 'SUCCESS') unstable('unit test failures')
if (test.result == 'FAILURE') currentBuild.result='FAILURE'
}
@@ -113,7 +103,14 @@ pipeline {
stage('long units') {
steps {
script {
long_test = build job: "${env.JOB_NAME}-long-test", propagate: false
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
long_test = build job: "${env.JOB_NAME}-long-test", propagate: false
}
if (long_test.result != 'SUCCESS') unstable('long unit test failures')
if (long_test.result == 'FAILURE') currentBuild.result='FAILURE'
}
@@ -131,7 +128,14 @@ pipeline {
stage('burn') {
steps {
script {
burn = build job: "${env.JOB_NAME}-test-burn", propagate: false
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
burn = build job: "${env.JOB_NAME}-test-burn", propagate: false
}
if (burn.result != 'SUCCESS') unstable('burn test failures')
if (burn.result == 'FAILURE') currentBuild.result='FAILURE'
}
@@ -149,7 +153,14 @@ pipeline {
stage('cdc') {
steps {
script {
cdc = build job: "${env.JOB_NAME}-test-cdc", propagate: false
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
cdc = build job: "${env.JOB_NAME}-test-cdc", propagate: false
}
if (cdc.result != 'SUCCESS') unstable('cdc failures')
if (cdc.result == 'FAILURE') currentBuild.result='FAILURE'
}
@@ -167,7 +178,14 @@ pipeline {
stage('compression') {
steps {
script {
compression = build job: "${env.JOB_NAME}-test-compression", propagate: false
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
compression = build job: "${env.JOB_NAME}-test-compression", propagate: false
}
if (compression.result != 'SUCCESS') unstable('compression failures')
if (compression.result == 'FAILURE') currentBuild.result='FAILURE'
}
@@ -185,10 +203,17 @@ pipeline {
stage('cqlsh') {
steps {
script {
cqlsh = build job: "${env.JOB_NAME}-cqlsh-tests", propagate: false
if (cqlsh.result != 'SUCCESS') unstable('cqlsh failures')
if (cqlsh.result == 'FAILURE') currentBuild.result='FAILURE'
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
cqlsh = build job: "${env.JOB_NAME}-cqlsh-tests", propagate: false
}
if (cqlsh.result != 'SUCCESS') unstable('cqlsh failures')
if (cqlsh.result == 'FAILURE') currentBuild.result='FAILURE'
}
}
post {
always {
@@ -204,10 +229,67 @@ pipeline {
}
stage('Distributed Test') {
parallel {
stage('jvm-dtest') {
  steps {
    script {
      // Trigger the downstream jvm-dtest job; one retry with a linear
      // backoff absorbs transient infrastructure hiccups.
      def tries = 1
      retry(2) {
        if (tries > 1) { sleep(60 * tries) }
        tries += 1
        // propagate: false — the result is examined manually below.
        jvm_dtest = build job: "${env.JOB_NAME}-jvm-dtest", propagate: false
      }
      // Reflect the downstream outcome on this pipeline: any non-success
      // is unstable; a hard failure escalates the overall build result.
      if (jvm_dtest.result != 'SUCCESS') { unstable('jvm-dtest failures') }
      if (jvm_dtest.result == 'FAILURE') { currentBuild.result = 'FAILURE' }
    }
  }
  post {
    always {
      // Collect JUnit XML from the downstream run, tolerating absence.
      warnError('missing test xml files') {
        script {
          copyTestResults('jvm-dtest', jvm_dtest.getNumber())
        }
      }
    }
  }
}
stage('jvm-dtest-upgrade') {
  steps {
    script {
      // Kick off the downstream jvm-dtest-upgrade job with a single
      // retry and linear backoff for transient agent failures.
      def tries = 1
      retry(2) {
        if (tries > 1) { sleep(60 * tries) }
        tries += 1
        // propagate: false — we translate the result ourselves below.
        jvm_dtest_upgrade = build job: "${env.JOB_NAME}-jvm-dtest-upgrade", propagate: false
      }
      // Non-success → unstable; downstream FAILURE → whole build FAILURE.
      if (jvm_dtest_upgrade.result != 'SUCCESS') { unstable('jvm-dtest-upgrade failures') }
      if (jvm_dtest_upgrade.result == 'FAILURE') { currentBuild.result = 'FAILURE' }
    }
  }
  post {
    always {
      // Harvest JUnit XML from the downstream build; missing files only warn.
      warnError('missing test xml files') {
        script {
          copyTestResults('jvm-dtest-upgrade', jvm_dtest_upgrade.getNumber())
        }
      }
    }
  }
}
stage('dtest') {
steps {
script {
dtest = build job: "${env.JOB_NAME}-dtest", propagate: false
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
dtest = build job: "${env.JOB_NAME}-dtest", propagate: false
}
if (dtest.result != 'SUCCESS') unstable('dtest failures')
if (dtest.result == 'FAILURE') currentBuild.result='FAILURE'
}
@@ -225,7 +307,14 @@ pipeline {
stage('dtest-large') {
steps {
script {
dtest_large = build job: "${env.JOB_NAME}-dtest-large", propagate: false
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
dtest_large = build job: "${env.JOB_NAME}-dtest-large", propagate: false
}
if (dtest_large.result != 'SUCCESS') unstable('dtest-large failures')
if (dtest_large.result == 'FAILURE') currentBuild.result='FAILURE'
}
@@ -243,7 +332,14 @@ pipeline {
stage('dtest-novnode') {
steps {
script {
dtest_novnode = build job: "${env.JOB_NAME}-dtest-novnode", propagate: false
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
dtest_novnode = build job: "${env.JOB_NAME}-dtest-novnode", propagate: false
}
if (dtest_novnode.result != 'SUCCESS') unstable('dtest-novnode failures')
if (dtest_novnode.result == 'FAILURE') currentBuild.result='FAILURE'
}
@@ -261,7 +357,14 @@ pipeline {
stage('dtest-offheap') {
steps {
script {
dtest_offheap = build job: "${env.JOB_NAME}-dtest-offheap", propagate: false
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
dtest_offheap = build job: "${env.JOB_NAME}-dtest-offheap", propagate: false
}
if (dtest_offheap.result != 'SUCCESS') unstable('dtest-offheap failures')
if (dtest_offheap.result == 'FAILURE') currentBuild.result='FAILURE'
}
@@ -279,7 +382,14 @@ pipeline {
stage('dtest-large-novnode') {
steps {
script {
dtest_large_novnode = build job: "${env.JOB_NAME}-dtest-large-novnode", propagate: false
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
dtest_large_novnode = build job: "${env.JOB_NAME}-dtest-large-novnode", propagate: false
}
if (dtest_large_novnode.result != 'SUCCESS') unstable('dtest-large-novnode failures')
if (dtest_large_novnode.result == 'FAILURE') currentBuild.result='FAILURE'
}
@@ -297,7 +407,14 @@ pipeline {
stage('dtest-upgrade') {
steps {
script {
dtest_upgrade = build job: "${env.JOB_NAME}-dtest-upgrade", propagate: false
def attempt = 1
retry(2) {
if (attempt > 1) {
sleep(60 * attempt)
}
attempt = attempt + 1
dtest_upgrade = build job: "${env.JOB_NAME}-dtest-upgrade", propagate: false
}
if (dtest_upgrade.result != 'SUCCESS') unstable('dtest failures')
if (dtest_upgrade.result == 'FAILURE') currentBuild.result='FAILURE'
}
@@ -343,14 +460,16 @@ ${FAILED_TESTS,maxTests=500,showMessage=false,showStack=false}
For complete test report and logs see https://nightlies.apache.org/cassandra/${JOB_NAME}/${BUILD_NUMBER}/
'''
}
sh "echo \"cassandra-builds at: `git -C cassandra-builds log -1 --pretty=format:'%h %an %ad %s'`\" > builds.head"
sh "find . -type f -name \\*.head -exec cat {} \\;"
sh "echo \"summary) cassandra-builds: `git -C cassandra-builds log -1 --pretty=format:'%H %an %ad %s'`\" > builds.head"
sh "./cassandra-builds/jenkins-dsl/print-shas.sh"
sh "xz TESTS-TestSuites.xml"
sh "wget --retry-connrefused --waitretry=1 \"\${BUILD_URL}/timestamps/?time=HH:mm:ss&timeZone=UTC&appendLog\" -qO - > console.log || echo wget failed"
sh "xz console.log"
sh "echo \"For test report and logs see https://nightlies.apache.org/cassandra/${JOB_NAME}/${BUILD_NUMBER}/\""
}
post {
always {
sshPublisher(publishers: [sshPublisherDesc(configName: 'Nightlies', transfers: [sshTransfer(remoteDirectory: 'cassandra/${JOB_NAME}/${BUILD_NUMBER}/', sourceFiles: 'TESTS-TestSuites.xml.xz')])])
sshPublisher(publishers: [sshPublisherDesc(configName: 'Nightlies', transfers: [sshTransfer(remoteDirectory: 'cassandra/${JOB_NAME}/${BUILD_NUMBER}/', sourceFiles: 'console.log.xz,TESTS-TestSuites.xml.xz')])])
}
}
}

0 comments on commit bb0e63f

Please sign in to comment.