Skip to content

Commit

Permalink
Fix ArrayList error in Orchestration pipeline - 4.x (#756) (#770)
Browse files Browse the repository at this point in the history
Fix SerializationException
Fix NULL in SSDS generation
Fix SaaS bug in FinalizeStage

Co-authored-by: Martin Etmajer <martin.etmajer@boehringer-ingelheim.com>
  • Loading branch information
s2oBCN and metmajer committed Nov 12, 2021
1 parent 8005e9b commit c4bf8f9
Show file tree
Hide file tree
Showing 15 changed files with 359 additions and 88 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,8 @@
- Automatically change the date_created by Wiremock with a Wiremock wildcard ([#743](https://github.com/opendevstack/ods-jenkins-shared-library/pull/743))
- Refactor and create unit test for LeVADocumentUseCase.getReferencedDocumentsVersion ([#744](https://github.com/opendevstack/ods-jenkins-shared-library/pull/744))
- Remove non breakable white space from Json response of JiraService ([#760](https://github.com/opendevstack/ods-jenkins-shared-library/pull/760))
- Rollback changes filtering components for TIP ([#762](https://github.com/opendevstack/ods-jenkins-shared-library/pull/762))
- Fix SerializationException, Fix NULL in SSDS generation and Fix SaaS bug in FinalizeStage ([#756](https://github.com/opendevstack/ods-jenkins-shared-library/pull/756))

## [3.0] - 2020-08-11

Expand Down
61 changes: 46 additions & 15 deletions src/org/ods/component/Context.groovy
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package org.ods.component

import org.ods.util.ShellWithRetry
import org.ods.util.Logger
import org.ods.services.ServiceRegistry
import org.ods.services.BitbucketService
Expand All @@ -9,10 +10,14 @@ import org.ods.services.OpenShiftService
import com.cloudbees.groovy.cps.NonCPS
import groovy.json.JsonSlurperClassic
import groovy.json.JsonOutput
import java.util.concurrent.ExecutionException

@SuppressWarnings(['MethodCount', 'UnnecessaryObjectReferences'])
class Context implements IContext {

static final int MAX_RETRIES = 5
static final int WAIT_TIME_SECONDS = 1

final List excludeFromContextDebugConfig = ['nexusPassword', 'nexusUsername']
// script is the context of the Jenkinsfile. That means that things like "sh" need to be called on script.
private final def script
Expand All @@ -34,8 +39,27 @@ class Context implements IContext {
this.localCheckoutEnabled = localCheckoutEnabled
}

@SuppressWarnings(['AbcMetric', 'CyclomaticComplexity', 'MethodSize', 'Instanceof'])
// Assembles the pipeline context, retrying up to MAX_RETRIES times when a
// Jenkins CPS (pipeline) serialization failure interrupts the run.
// Waits WAIT_TIME_SECONDS between attempts; throws if all attempts fail.
def assemble() {
    int retry = 0
    boolean executedWithErrors = true

    while (executedWithErrors && retry++ < MAX_RETRIES) {
        try {
            assembleWithRetry()
            executedWithErrors = false
        } catch (java.io.NotSerializableException err) {
            // Transient Jenkins CPS serialization issue: back off briefly and retry.
            logger.warn ("WARN: Jenkins serialization issue; attempt #: ${retry}, when: context.assemble()")
            script.sleep(WAIT_TIME_SECONDS)
        }
    }

    if (executedWithErrors) {
        // NOTE(review): java.util.concurrent.ExecutionException(String) is a
        // *protected* constructor; this call relies on Groovy's relaxed access
        // checks. Confirm it behaves as intended under the Jenkins sandbox, or
        // consider an unchecked exception carrying the last caught error as cause.
        throw new ExecutionException("Jenkins serialization issue, when: context.assemble()")
    }
}

@SuppressWarnings(['AbcMetric', 'CyclomaticComplexity', 'MethodSize', 'Instanceof'])
def assembleWithRetry() {
logger.debug 'Validating input ...'
// branchToEnvironmentMapping must be given, but it is OK to be empty - e.g.
// if the repository should not be deployed to OpenShift at all.
Expand All @@ -60,8 +84,8 @@ class Context implements IContext {
config.sonarQubeEdition = script.env.SONAR_EDITION ?: 'community'

config.globalExtensionImageLabels = getExtensionBuildParams()
config.globalExtensionImageLabels << getEnvParamsAndAddPrefix('OPENSHIFT_BUILD',
'JENKINS_MASTER_')
config.globalExtensionImageLabels.putAll(getEnvParamsAndAddPrefix('OPENSHIFT_BUILD',
'JENKINS_MASTER_'))

logger.debug("Got external build labels: ${config.globalExtensionImageLabels}")

Expand Down Expand Up @@ -164,8 +188,8 @@ class Context implements IContext {
config.globalExtensionImageLabels = [:]
}
// get the build labels from the env running in ..
config.globalExtensionImageLabels << getEnvParamsAndAddPrefix('OPENSHIFT_BUILD',
'JENKINS_AGENT_')
config.globalExtensionImageLabels.putAll(getEnvParamsAndAddPrefix('OPENSHIFT_BUILD',
'JENKINS_AGENT_'))
}

boolean getDebug() {
Expand Down Expand Up @@ -516,20 +540,27 @@ class Context implements IContext {
}

Map<String,String> getEnvParamsAndAddPrefix (String envNamePattern = 'ods.build.', String keyPrefix = '') {
String rawEnv = script.sh(
returnStdout: true, script: "env | grep ${envNamePattern} || true",
label: 'getting extension labels from current environment'
).trim()
String rawEnv = new ShellWithRetry(script, logger).execute(
returnStdout: true,
script: "env | grep ${envNamePattern} || true",
label: 'getting extension labels from current environment')

if (rawEnv.size() == 0 ) {
if (rawEnv.length() == 0 ) {
return [:]
}

return rawEnv.normalize().split(System.getProperty('line.separator')).inject([ : ] ) { kvMap, line ->
Iterator kv = line.toString().tokenize('=').iterator()
kvMap.put(keyPrefix + kv.next(), kv.hasNext() ? kv.next() : '')
kvMap
return normalizeEnvironment(rawEnv, keyPrefix)
}

// Parses the raw output of the `env` shell command ("KEY=VALUE" per line) into
// a map, prepending keyPrefix to every key.
//
// Marked @NonCPS: runs as plain (non-CPS-transformed) code so no intermediate
// loop state needs to be serialized by Jenkins — this is the serialization fix
// this method exists for.
//
// rawEnv    - multi-line `env` output; normalized so line separators are uniform
// keyPrefix - prefix prepended to each environment variable name
// returns   - Map of prefixed variable names to values ('' when a line has no value)
@NonCPS
Map<String,String> normalizeEnvironment (String rawEnv, String keyPrefix) {
    def lineSplitEnv = rawEnv.normalize().split(System.getProperty('line.separator'))
    Map normalizedEnv = [ : ]
    for (int lineC = 0; lineC < lineSplitEnv.size(); lineC++) {
        def splittedLine = lineSplitEnv[lineC].toString().tokenize('=')
        // Guard lines without a value (e.g. "KEY="): tokenize('=') then yields a
        // single element and splittedLine[1] would be null. Fall back to '' to
        // match the behaviour of the inline code this method replaced
        // (`kv.hasNext() ? kv.next() : ''`).
        normalizedEnv.put(keyPrefix + splittedLine[0], splittedLine.size() > 1 ? splittedLine[1] : '')
    }
    return normalizedEnv
}

String getOpenshiftApplicationDomain () {
Expand Down Expand Up @@ -616,7 +647,7 @@ class Context implements IContext {

private String commitHashForBuild(build) {
return build
.getActions(hudson.plugins.git.util.BuildData.class)
.getActions(hudson.plugins.git.util.BuildData)
.find { action -> action.getRemoteUrls().contains(config.gitUrl) }
.getLastBuiltRevision().getSha1String()
}
Expand Down
1 change: 1 addition & 0 deletions src/org/ods/component/IContext.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -197,4 +197,5 @@ interface IContext {

// get commit the working tree
boolean getCommitGitWorkingTree ()

}
4 changes: 3 additions & 1 deletion src/org/ods/component/Pipeline.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,9 @@ class Pipeline implements Serializable {
replace(wtfEnvBug, "${defaultDockerRegistry}/")
logger.warn ("Patched image via master env to: ${config.image}")
}

context.assemble()

// register services after context was assembled
logger.debug('-> Registering & loading global services')
def registry = ServiceRegistry.instance
Expand Down Expand Up @@ -356,7 +358,7 @@ class Pipeline implements Serializable {
return this.ciSkipEnabled && gitService.ciSkipInCommitMessage
}

private def prepareAgentPodConfig(Map config) {
private void prepareAgentPodConfig(Map config) {
if (!config.image && !config.imageStreamTag && !config.podContainers) {
script.error "One of 'image', 'imageStreamTag' or 'podContainers' is required"
}
Expand Down
6 changes: 5 additions & 1 deletion src/org/ods/orchestration/BuildStage.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -57,12 +57,14 @@ class BuildStage extends Stage {
"- no unit tests results will be reported")
}

logger.info("levaDocScheduler.run start")
levaDocScheduler.run(
phase,
MROPipelineUtil.PipelinePhaseLifecycleStage.POST_EXECUTE_REPO,
repo,
data
)
logger.info("levaDocScheduler.run end")
}
}

Expand All @@ -88,7 +90,9 @@ class BuildStage extends Stage {
def failedRepos = repos.flatten().findAll { it.data?.failedStage }
if (project.isAssembleMode && project.isWorkInProgress &&
(project.hasFailingTests() || failedRepos.size > 0)) {
util.failBuild("Failing build as repositories contain errors!\nFailed: ${failedRepos}")
def errMessage = "Failing build as repositories contain errors!\nFailed: ${failedRepos}"
util.failBuild(errMessage)
throw new IllegalStateException(errMessage)
}
}

Expand Down
1 change: 1 addition & 0 deletions src/org/ods/orchestration/DeployStage.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -151,4 +151,5 @@ class DeployStage extends Stage {
logger.warn("No log state for ${repo.data} found!")
}
}

}
94 changes: 54 additions & 40 deletions src/org/ods/orchestration/FinalizeStage.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -63,11 +63,12 @@ class FinalizeStage extends Stage {
}
repoFinalizeTasks.failFast = true
script.parallel(repoFinalizeTasks)

logger.debug("Integrate into main branch")
if (project.isAssembleMode && !project.isWorkInProgress) {
integrateIntoMainBranchRepos(steps, git)
}

logger.debug("Gatering commits")
gatherCreatedExecutionCommits(steps, git)

if (!project.buildParams.rePromote) {
Expand Down Expand Up @@ -131,21 +132,23 @@ class FinalizeStage extends Stage {

private void pushRepos(IPipelineSteps steps, GitService git) {
def flattenedRepos = repos.flatten()
def repoPushTasks = flattenedRepos.collectEntries { repo ->
[
(repo.id): {
steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") {
if (project.isWorkInProgress) {
git.pushRef(repo.branch)
} else if (project.isAssembleMode) {
git.createTag(project.targetTag)
git.pushBranchWithTags(project.gitReleaseBranch)
} else {
git.createTag(project.targetTag)
git.pushRef(project.targetTag)
}
def repoPushTasks = [ : ]
def repoSize = flattenedRepos.size()
for (def i = 0; i < repoSize; i++) {
def repo = flattenedRepos[i]
repoPushTasks << [ (repo.id): {
steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") {
if (project.isWorkInProgress) {
git.pushRef(repo.branch)
} else if (project.isAssembleMode) {
git.createTag(project.targetTag)
git.pushBranchWithTags(project.gitReleaseBranch)
} else {
git.createTag(project.targetTag)
git.pushRef(project.targetTag)
}
}
}
]
}
repoPushTasks.failFast = true
Expand All @@ -154,42 +157,48 @@ class FinalizeStage extends Stage {

private void gatherCreatedExecutionCommits(IPipelineSteps steps, GitService git) {
def flattenedRepos = repos.flatten()
def gatherCommitTasks = flattenedRepos.collectEntries { repo ->
[
(repo.id): {
steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") {
repo.data.git.createdExecutionCommit = git.commitSha
}
def gatherCommitTasks = [ : ]
def repoSize = flattenedRepos.size()
for (def i = 0; i < repoSize; i++) {
def repo = flattenedRepos[i]
gatherCommitTasks << [ (repo.id): {
steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") {
repo.data.git.createdExecutionCommit = git.commitSha
steps.echo "repo.id: ${repo.id}: ${repo.data.git.createdExecutionCommit}"
}
}
]
}

gatherCommitTasks.failFast = true
script.parallel(gatherCommitTasks)
}

private void integrateIntoMainBranchRepos(IPipelineSteps steps, GitService git) {
def flattenedRepos = repos.flatten()
def repoIntegrateTasks = flattenedRepos
.findAll { it.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_TEST &&
it.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_INFRA &&
it.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_SAAS_SERVICE }
.collectEntries { repo ->
[
(repo.id): {
steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") {
def filesToCheckout = []
if (steps.fileExists('openshift')) {
filesToCheckout = ['openshift/ods-deployments.json']
} else {
filesToCheckout = [
'openshift-exported/ods-deployments.json',
'openshift-exported/template.yml'
]
}
git.mergeIntoMainBranch(project.gitReleaseBranch, repo.branch, filesToCheckout)
def repoIntegrateTasks = [ : ]
def repoSize = flattenedRepos.size()
for (def i = 0; i < repoSize; i++) {
def repo = flattenedRepos[i]
if (repo.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_TEST &&
repo.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_INFRA &&
repo.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_SAAS_SERVICE ) {
repoIntegrateTasks << [ (repo.id): {
steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") {
def filesToCheckout = []
if (steps.fileExists('openshift')) {
filesToCheckout = ['openshift/ods-deployments.json']
} else {
filesToCheckout = [
'openshift-exported/ods-deployments.json',
'openshift-exported/template.yml'
]
}
git.mergeIntoMainBranch(project.gitReleaseBranch, repo.branch, filesToCheckout)
}
}
]
}
}
repoIntegrateTasks.failFast = true
script.parallel(repoIntegrateTasks)
Expand All @@ -199,10 +208,15 @@ class FinalizeStage extends Stage {
// record release manager repo state
logger.debug "Finalize: Recording HEAD commits from repos ..."
logger.debug "On release manager commit ${git.commitSha}"
def gitHeads = repos.flatten().collectEntries { repo ->
def flattenedRepos = repos.flatten()
def gitHeads = [ : ]
def repoSize = flattenedRepos.size()
for (def i = 0; i < repoSize; i++) {
def repo = flattenedRepos[i]
logger.debug "HEAD of repo '${repo.id}': ${repo.data.git.createdExecutionCommit}"
[(repo.id): (repo.data.git.createdExecutionCommit ?: '')]
gitHeads << [ (repo.id): (repo.data.git.createdExecutionCommit ?: '')]
}

def envState = [
version: project.buildParams.version,
changeId: project.buildParams.changeId,
Expand Down
Loading

0 comments on commit c4bf8f9

Please sign in to comment.