diff --git a/CHANGELOG.md b/CHANGELOG.md index 245b6ffc7..6d0382592 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -54,6 +54,7 @@ - Refactor and create unit test for LeVADocumentUseCase.getReferencedDocumentsVersion ([#744](https://github.com/opendevstack/ods-jenkins-shared-library/pull/744)) - Remove non breakable white space from Json response of JiraService ([760](https://github.com/opendevstack/ods-jenkins-shared-library/pull/760)) - Rollback changes filtering components for TIP ([#762](https://github.com/opendevstack/ods-jenkins-shared-library/pull/762)) +- Fix SerializationException, Fix NULL in SSDS generation and Fix SaaS bug in FinalizeStage ([#756](https://github.com/opendevstack/ods-jenkins-shared-library/pull/756)) ## [3.0] - 2020-08-11 diff --git a/src/org/ods/component/Context.groovy b/src/org/ods/component/Context.groovy index f8feee6db..459abfd1d 100644 --- a/src/org/ods/component/Context.groovy +++ b/src/org/ods/component/Context.groovy @@ -1,5 +1,6 @@ package org.ods.component +import org.ods.util.ShellWithRetry import org.ods.util.Logger import org.ods.services.ServiceRegistry import org.ods.services.BitbucketService @@ -9,10 +10,14 @@ import org.ods.services.OpenShiftService import com.cloudbees.groovy.cps.NonCPS import groovy.json.JsonSlurperClassic import groovy.json.JsonOutput +import java.util.concurrent.ExecutionException @SuppressWarnings(['MethodCount', 'UnnecessaryObjectReferences']) class Context implements IContext { + static final int MAX_RETRIES = 5 + static final int WAIT_TIME_SECONDS = 1 + final List excludeFromContextDebugConfig = ['nexusPassword', 'nexusUsername'] // script is the context of the Jenkinsfile. That means that things like "sh" need to be called on script. 
private final def script @@ -34,8 +39,27 @@ class Context implements IContext { this.localCheckoutEnabled = localCheckoutEnabled } - @SuppressWarnings(['AbcMetric', 'CyclomaticComplexity', 'MethodSize', 'Instanceof']) def assemble() { + int retry = 0 + boolean executedWithErrors = true + + while (executedWithErrors && retry++ < MAX_RETRIES) { + try { + assembleWithRetry() + executedWithErrors = false + } catch (java.io.NotSerializableException err) { + logger.warn ("WARN: Jenkins serialization issue; attempt #: ${retry}, when: context.assemble()") + script.sleep(WAIT_TIME_SECONDS) + } + } + + if (executedWithErrors) { + throw new ExecutionException("Jenkins serialization issue, when: context.assemble()") + } + } + + @SuppressWarnings(['AbcMetric', 'CyclomaticComplexity', 'MethodSize', 'Instanceof']) + def assembleWithRetry() { logger.debug 'Validating input ...' // branchToEnvironmentMapping must be given, but it is OK to be empty - e.g. // if the repository should not be deployed to OpenShift at all. @@ -60,8 +84,8 @@ class Context implements IContext { config.sonarQubeEdition = script.env.SONAR_EDITION ?: 'community' config.globalExtensionImageLabels = getExtensionBuildParams() - config.globalExtensionImageLabels << getEnvParamsAndAddPrefix('OPENSHIFT_BUILD', - 'JENKINS_MASTER_') + config.globalExtensionImageLabels.putAll(getEnvParamsAndAddPrefix('OPENSHIFT_BUILD', + 'JENKINS_MASTER_')) logger.debug("Got external build labels: ${config.globalExtensionImageLabels}") @@ -164,8 +188,8 @@ class Context implements IContext { config.globalExtensionImageLabels = [:] } // get the build labels from the env running in .. 
- config.globalExtensionImageLabels << getEnvParamsAndAddPrefix('OPENSHIFT_BUILD', - 'JENKINS_AGENT_') + config.globalExtensionImageLabels.putAll(getEnvParamsAndAddPrefix('OPENSHIFT_BUILD', + 'JENKINS_AGENT_')) } boolean getDebug() { @@ -516,20 +540,27 @@ class Context implements IContext { } Map getEnvParamsAndAddPrefix (String envNamePattern = 'ods.build.', String keyPrefix = '') { - String rawEnv = script.sh( - returnStdout: true, script: "env | grep ${envNamePattern} || true", - label: 'getting extension labels from current environment' - ).trim() + String rawEnv = new ShellWithRetry(script, logger).execute( + returnStdout: true, + script: "env | grep ${envNamePattern} || true", + label: 'getting extension labels from current environment') - if (rawEnv.size() == 0 ) { + if (rawEnv.length() == 0 ) { return [:] } - return rawEnv.normalize().split(System.getProperty('line.separator')).inject([ : ] ) { kvMap, line -> - Iterator kv = line.toString().tokenize('=').iterator() - kvMap.put(keyPrefix + kv.next(), kv.hasNext() ? 
kv.next() : '') - kvMap + return normalizeEnvironment(rawEnv, keyPrefix) + } + + @NonCPS + Map normalizeEnvironment (String rawEnv, String keyPrefix) { + def lineSplitEnv = rawEnv.normalize().split(System.getProperty('line.separator')) + Map normalizedEnv = [ : ] + for (int lineC = 0; lineC < lineSplitEnv.size(); lineC++) { + def splittedLine = lineSplitEnv[lineC].toString().tokenize('=') + normalizedEnv.put(keyPrefix + splittedLine[0], splittedLine[1]) } + return normalizedEnv } String getOpenshiftApplicationDomain () { @@ -616,7 +647,7 @@ class Context implements IContext { private String commitHashForBuild(build) { return build - .getActions(hudson.plugins.git.util.BuildData.class) + .getActions(hudson.plugins.git.util.BuildData) .find { action -> action.getRemoteUrls().contains(config.gitUrl) } .getLastBuiltRevision().getSha1String() } diff --git a/src/org/ods/component/IContext.groovy b/src/org/ods/component/IContext.groovy index d2b39dcd5..96b36d417 100644 --- a/src/org/ods/component/IContext.groovy +++ b/src/org/ods/component/IContext.groovy @@ -197,4 +197,5 @@ interface IContext { // get commit the working tree boolean getCommitGitWorkingTree () + } diff --git a/src/org/ods/component/Pipeline.groovy b/src/org/ods/component/Pipeline.groovy index 7650c814c..8f0e31078 100644 --- a/src/org/ods/component/Pipeline.groovy +++ b/src/org/ods/component/Pipeline.groovy @@ -100,7 +100,9 @@ class Pipeline implements Serializable { replace(wtfEnvBug, "${defaultDockerRegistry}/") logger.warn ("Patched image via master env to: ${config.image}") } + context.assemble() + // register services after context was assembled logger.debug('-> Registering & loading global services') def registry = ServiceRegistry.instance @@ -356,7 +358,7 @@ class Pipeline implements Serializable { return this.ciSkipEnabled && gitService.ciSkipInCommitMessage } - private def prepareAgentPodConfig(Map config) { + private void prepareAgentPodConfig(Map config) { if (!config.image && 
!config.imageStreamTag && !config.podContainers) { script.error "One of 'image', 'imageStreamTag' or 'podContainers' is required" } diff --git a/src/org/ods/orchestration/BuildStage.groovy b/src/org/ods/orchestration/BuildStage.groovy index 11589c33d..4decaa8f6 100644 --- a/src/org/ods/orchestration/BuildStage.groovy +++ b/src/org/ods/orchestration/BuildStage.groovy @@ -57,12 +57,14 @@ class BuildStage extends Stage { "- no unit tests results will be reported") } + logger.info("levaDocScheduler.run start") levaDocScheduler.run( phase, MROPipelineUtil.PipelinePhaseLifecycleStage.POST_EXECUTE_REPO, repo, data ) + logger.info("levaDocScheduler.run end") } } @@ -88,7 +90,9 @@ class BuildStage extends Stage { def failedRepos = repos.flatten().findAll { it.data?.failedStage } if (project.isAssembleMode && project.isWorkInProgress && (project.hasFailingTests() || failedRepos.size > 0)) { - util.failBuild("Failing build as repositories contain errors!\nFailed: ${failedRepos}") + def errMessage = "Failing build as repositories contain errors!\nFailed: ${failedRepos}" + util.failBuild(errMessage) + throw new IllegalStateException(errMessage) } } diff --git a/src/org/ods/orchestration/DeployStage.groovy b/src/org/ods/orchestration/DeployStage.groovy index c1cce7258..b03ab41d9 100644 --- a/src/org/ods/orchestration/DeployStage.groovy +++ b/src/org/ods/orchestration/DeployStage.groovy @@ -151,4 +151,5 @@ class DeployStage extends Stage { logger.warn("No log state for ${repo.data} found!") } } + } diff --git a/src/org/ods/orchestration/FinalizeStage.groovy b/src/org/ods/orchestration/FinalizeStage.groovy index d1441afa6..e5abcb5fd 100644 --- a/src/org/ods/orchestration/FinalizeStage.groovy +++ b/src/org/ods/orchestration/FinalizeStage.groovy @@ -63,11 +63,12 @@ class FinalizeStage extends Stage { } repoFinalizeTasks.failFast = true script.parallel(repoFinalizeTasks) - + logger.debug("Integrate into main branch") if (project.isAssembleMode && !project.isWorkInProgress) { 
integrateIntoMainBranchRepos(steps, git) } + logger.debug("Gathering commits") gatherCreatedExecutionCommits(steps, git) if (!project.buildParams.rePromote) { @@ -131,21 +132,23 @@ private void pushRepos(IPipelineSteps steps, GitService git) { def flattenedRepos = repos.flatten() - def repoPushTasks = flattenedRepos.collectEntries { repo -> - [ - (repo.id): { - steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") { - if (project.isWorkInProgress) { - git.pushRef(repo.branch) - } else if (project.isAssembleMode) { - git.createTag(project.targetTag) - git.pushBranchWithTags(project.gitReleaseBranch) - } else { - git.createTag(project.targetTag) - git.pushRef(project.targetTag) - } + def repoPushTasks = [ : ] + def repoSize = flattenedRepos.size() + for (def i = 0; i < repoSize; i++) { + def repo = flattenedRepos[i] + repoPushTasks << [ (repo.id): { + steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") { + if (project.isWorkInProgress) { + git.pushRef(repo.branch) + } else if (project.isAssembleMode) { + git.createTag(project.targetTag) + git.pushBranchWithTags(project.gitReleaseBranch) + } else { + git.createTag(project.targetTag) + git.pushRef(project.targetTag) } } + } ] } repoPushTasks.failFast = true @@ -154,42 +157,48 @@ private void gatherCreatedExecutionCommits(IPipelineSteps steps, GitService git) { def flattenedRepos = repos.flatten() - def gatherCommitTasks = flattenedRepos.collectEntries { repo -> - [ - (repo.id): { - steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") { - repo.data.git.createdExecutionCommit = git.commitSha - } + def gatherCommitTasks = [ : ] + def repoSize = flattenedRepos.size() + for (def i = 0; i < repoSize; i++) { + def repo = flattenedRepos[i] + gatherCommitTasks << [ (repo.id): { + steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") { 
repo.data.git.createdExecutionCommit = git.commitSha + steps.echo "repo.id: ${repo.id}: ${repo.data.git.createdExecutionCommit}" } + } ] } + gatherCommitTasks.failFast = true script.parallel(gatherCommitTasks) } private void integrateIntoMainBranchRepos(IPipelineSteps steps, GitService git) { def flattenedRepos = repos.flatten() - def repoIntegrateTasks = flattenedRepos - .findAll { it.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_TEST && - it.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_INFRA && - it.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_SAAS_SERVICE } - .collectEntries { repo -> - [ - (repo.id): { - steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") { - def filesToCheckout = [] - if (steps.fileExists('openshift')) { - filesToCheckout = ['openshift/ods-deployments.json'] - } else { - filesToCheckout = [ - 'openshift-exported/ods-deployments.json', - 'openshift-exported/template.yml' - ] - } - git.mergeIntoMainBranch(project.gitReleaseBranch, repo.branch, filesToCheckout) + def repoIntegrateTasks = [ : ] + def repoSize = flattenedRepos.size() + for (def i = 0; i < repoSize; i++) { + def repo = flattenedRepos[i] + if (repo.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_TEST && + repo.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_INFRA && + repo.type?.toLowerCase() != MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_SAAS_SERVICE ) { + repoIntegrateTasks << [ (repo.id): { + steps.dir("${steps.env.WORKSPACE}/${MROPipelineUtil.REPOS_BASE_DIR}/${repo.id}") { + def filesToCheckout = [] + if (steps.fileExists('openshift')) { + filesToCheckout = ['openshift/ods-deployments.json'] + } else { + filesToCheckout = [ + 'openshift-exported/ods-deployments.json', + 'openshift-exported/template.yml' + ] } + git.mergeIntoMainBranch(project.gitReleaseBranch, repo.branch, filesToCheckout) } + } ] + } } repoIntegrateTasks.failFast = true 
script.parallel(repoIntegrateTasks) @@ -199,10 +208,15 @@ class FinalizeStage extends Stage { // record release manager repo state logger.debug "Finalize: Recording HEAD commits from repos ..." logger.debug "On release manager commit ${git.commitSha}" - def gitHeads = repos.flatten().collectEntries { repo -> + def flattenedRepos = repos.flatten() + def gitHeads = [ : ] + def repoSize = flattenedRepos.size() + for (def i = 0; i < repoSize; i++) { + def repo = flattenedRepos[i] logger.debug "HEAD of repo '${repo.id}': ${repo.data.git.createdExecutionCommit}" - [(repo.id): (repo.data.git.createdExecutionCommit ?: '')] + gitHeads << [ (repo.id): (repo.data.git.createdExecutionCommit ?: '')] } + def envState = [ version: project.buildParams.version, changeId: project.buildParams.changeId, diff --git a/src/org/ods/orchestration/InitStage.groovy b/src/org/ods/orchestration/InitStage.groovy index 8fb7bc130..9e1a57c50 100644 --- a/src/org/ods/orchestration/InitStage.groovy +++ b/src/org/ods/orchestration/InitStage.groovy @@ -265,17 +265,17 @@ class InitStage extends Stage { def repos = project.repositories @SuppressWarnings('Indentation') Closure checkoutClosure = - { - script.parallel ( - repos.collectEntries { repo -> - logger.info("Loading Repository: ${repo}") - if (envState?.repositories) { - repo.data.envStateCommit = envState.repositories[repo.id] ?: '' + { + script.parallel ( + repos.collectEntries { repo -> + logger.info("Loading Repository: ${repo}") + if (envState?.repositories) { + repo.data.envStateCommit = envState.repositories[repo.id] ?: '' + } + util.prepareCheckoutRepoNamedJob(repo) } - util.prepareCheckoutRepoNamedJob(repo) - } - ) - } + ) + } Closure loadClosure = { logger.debugClocked('Project#load') @@ -287,9 +287,9 @@ class InitStage extends Stage { && buildParams.version == 'WIP') { throw new RuntimeException( 'Error: trying to deploy to Q or P without having defined a correct version. 
' + - "${buildParams.version} version value is not allowed for those environments. " + - 'If you are using Jira, please check that all values are set in the release manager issue. ' + - "Build parameters obtained: ${buildParams}" + "${buildParams.version} version value is not allowed for those environments. " + + 'If you are using Jira, please check that all values are set in the release manager issue. ' + + "Build parameters obtained: ${buildParams}" ) } @@ -299,12 +299,12 @@ class InitStage extends Stage { } else if (project.buildParams.targetEnvironmentToken == 'Q') { throw new RuntimeException( "Git Tag '${project.targetTag}' already exists. " + - "It can only be deployed again to 'Q' if build param 'rePromote' is set to 'true'." + "It can only be deployed again to 'Q' if build param 'rePromote' is set to 'true'." ) } else { throw new RuntimeException( "Git Tag '${project.targetTag}' already exists. " + - "It cannot be deployed again to 'P'." + "It cannot be deployed again to 'P'." ) } } @@ -360,14 +360,14 @@ class InitStage extends Stage { } else if (project.buildParams.targetEnvironmentToken == 'Q') { util.warnBuild( "${repo.id}@${repo.data.git.commit} is NOT a descendant of ${repo.data.envStateCommit}, " + - "which has previously been promoted to 'Q'. If ${repo.data.envStateCommit} has been " + - "promoted to 'P' as well, promotion to 'P' will fail. Proceed with caution." + "which has previously been promoted to 'Q'. If ${repo.data.envStateCommit} has been " + + "promoted to 'P' as well, promotion to 'P' will fail. Proceed with caution." ) } else { throw new RuntimeException( "${repo.id}@${repo.data.git.commit} is NOT a descendant of ${repo.data.envStateCommit}, " + - "which has previously been promoted to 'P'. Ensure to merge everything that has been " + - "promoted to 'P' into ${project.gitReleaseBranch}." + "which has previously been promoted to 'P'. Ensure to merge everything that has been " + + "promoted to 'P' into ${project.gitReleaseBranch}." 
) } } else { @@ -408,7 +408,7 @@ class InitStage extends Stage { if (project.buildParams.targetEnvironment == 'dev' && !os.envExists(targetProject)) { throw new RuntimeException( "Target project ${targetProject} does not exist " + - "(versionedDevEnvsEnabled=${project.versionedDevEnvsEnabled})." + "(versionedDevEnvsEnabled=${project.versionedDevEnvsEnabled})." ) } project.setTargetProject(targetProject) diff --git a/src/org/ods/orchestration/usecase/JUnitTestReportsUseCase.groovy b/src/org/ods/orchestration/usecase/JUnitTestReportsUseCase.groovy index 6f81e8fbe..d4935447c 100644 --- a/src/org/ods/orchestration/usecase/JUnitTestReportsUseCase.groovy +++ b/src/org/ods/orchestration/usecase/JUnitTestReportsUseCase.groovy @@ -9,24 +9,24 @@ import org.ods.orchestration.util.Project @SuppressWarnings(['JavaIoPackageAccess', 'EmptyCatchBlock']) class JUnitTestReportsUseCase { - private Project project - private IPipelineSteps steps + private final Project project + private final IPipelineSteps steps JUnitTestReportsUseCase(Project project, IPipelineSteps steps) { this.project = project this.steps = steps } + @NonCPS Map combineTestResults(List testResults) { def result = [ testsuites: [] ] - - testResults.each { testResult -> - result.testsuites.addAll(testResult.testsuites) + for (def i = 0; i < testResults.size(); i++) { + result.testsuites.addAll(testResults[i].testsuites) } - return result } + @NonCPS int getNumberOfTestCases(Map testResults) { def result = 0 @@ -50,15 +50,17 @@ class JUnitTestReportsUseCase { return result } + @NonCPS Map parseTestReportFiles(List files) { - def testResults = files.collect { file -> - JUnitParser.parseJUnitXML(file.text) + List testResults = [] + for (def i = 0; i < files.size(); i++) { + testResults.add(JUnitParser.parseJUnitXML(files[i].text)) } - return this.combineTestResults(testResults) } void reportTestReportsFromPathToJenkins(String path) { this.steps.junit("${path}/**/*.xml") } + } diff --git 
a/src/org/ods/orchestration/usecase/LeVADocumentUseCase.groovy b/src/org/ods/orchestration/usecase/LeVADocumentUseCase.groovy index 09134951e..00e814d50 100644 --- a/src/org/ods/orchestration/usecase/LeVADocumentUseCase.groovy +++ b/src/org/ods/orchestration/usecase/LeVADocumentUseCase.groovy @@ -5,7 +5,14 @@ import groovy.xml.XmlUtil import org.ods.orchestration.scheduler.LeVADocumentScheduler import org.ods.orchestration.service.DocGenService import org.ods.orchestration.service.LeVADocumentChaptersFileService -import org.ods.orchestration.util.* +import org.ods.orchestration.util.DocumentHistory +import org.ods.orchestration.util.Environment +import org.ods.orchestration.util.LeVADocumentUtil +import org.ods.orchestration.util.MROPipelineUtil +import org.ods.orchestration.util.PDFUtil +import org.ods.orchestration.util.PipelineUtil +import org.ods.orchestration.util.Project +import org.ods.orchestration.util.SortUtil import org.ods.services.GitService import org.ods.services.JenkinsService import org.ods.services.NexusService @@ -967,7 +974,7 @@ class LeVADocumentUseCase extends DocGenUseCase { .findAll { it.odsRepoType.toLowerCase() == MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_CODE.toLowerCase() } .collect { component -> // We will set-up a double loop in the template. 
For moustache limitations we need to have lists - component.requirements = component.requirements.collect { r -> + component.requirements = component.requirements.findAll{it != null}.collect { r -> [key: r.key, name: r.name, reqDescription: this.convertImages(r.description), gampTopic: r.gampTopic ?: "uncategorized"] }.groupBy { it.gampTopic.toLowerCase() } @@ -1216,6 +1223,7 @@ class LeVADocumentUseCase extends DocGenUseCase { return data.collect { it.subMap(['key', 'requirements', 'bugs']).values() }.flatten() } + @NonCPS List getSupportedDocuments() { return DocumentType.values().collect { it as String } } diff --git a/src/org/ods/util/CollectionWithForLoop.groovy b/src/org/ods/util/CollectionWithForLoop.groovy new file mode 100644 index 000000000..969aeb5a5 --- /dev/null +++ b/src/org/ods/util/CollectionWithForLoop.groovy @@ -0,0 +1,26 @@ +package org.ods.util + +class CollectionWithForLoop { + + static List findAll(Collection c, Closure filter) { + def results = [] + + for (def i = 0; i < c.size(); i++) { + if (filter(c[i])) { + results.add(c[i]) + } + } + return results + } + + static Map collectEntries(Collection c, Closure key, Closure value) { + Map results = [:] + + for (def i = 0; i < c.size(); i++) { + results[key(c[i])] = value(c[i]) + } + + return results + } + +} diff --git a/src/org/ods/util/ShellWithRetry.groovy b/src/org/ods/util/ShellWithRetry.groovy new file mode 100644 index 000000000..af656bf62 --- /dev/null +++ b/src/org/ods/util/ShellWithRetry.groovy @@ -0,0 +1,42 @@ +package org.ods.util + +import java.util.concurrent.ExecutionException + +class ShellWithRetry { + + static final int MAX_RETRIES = 5 + static final int WAIT_TIME_SECONDS = 5 + + private final ILogger logger + private final def jenkinsFileContext + + ShellWithRetry(def jenkinsFileContext, ILogger logger) { + this.jenkinsFileContext = jenkinsFileContext + this.logger = logger + } + + String execute(Map shellParams) { + String returnScript + int retry = 0 + boolean 
executedWithErrors = true + while (executedWithErrors && retry++ < MAX_RETRIES) { + try { + returnScript = jenkinsFileContext.sh( + script: shellParams.script, + returnStdout: shellParams.returnStdout, + label: shellParams.label + ) + executedWithErrors = false + } catch (java.io.NotSerializableException err) { + logger.warn ("WARN: Jenkins serialization issue; attempt #: ${retry}, when: [${shellParams.script}]") + jenkinsFileContext.sleep(WAIT_TIME_SECONDS) + } + } + + if (executedWithErrors) { + throw new ExecutionException("Jenkins serialization issue, when: [${shellParams.script}]") + } + return returnScript.trim() + } + +} diff --git a/test/groovy/org/ods/component/ContextSpec.groovy b/test/groovy/org/ods/component/ContextSpec.groovy index ad8288e1c..2c1205894 100644 --- a/test/groovy/org/ods/component/ContextSpec.groovy +++ b/test/groovy/org/ods/component/ContextSpec.groovy @@ -1,7 +1,9 @@ package org.ods.component import org.ods.PipelineScript +import org.ods.util.ILogger import org.ods.util.Logger +import org.ods.util.ShellWithRetry import spock.lang.* class ContextSpec extends Specification { @@ -97,6 +99,49 @@ class ContextSpec extends Specification { 'feature/foo-123-bar' | noEnv | 'preview' } + def "assemble with retry"() { + given: + Context context = Spy(new Context(script, null, logger) ) + + when: + context.assemble() + + then: + 1 * context.assembleWithRetry( ) >> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * context.assembleWithRetry( )>> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * context.assembleWithRetry( ) >> { return } + } + + def "assemble with retry and error"() { + given: + Context context = Spy(new Context(script, null, logger) ) + + when: + context.assemble() + + then: + 1 * context.assembleWithRetry( ) >> { throw new NotSerializableException("error")} + 1 * 
logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * context.assembleWithRetry( )>> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * context.assembleWithRetry( )>> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * context.assembleWithRetry( )>> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * context.assembleWithRetry( )>> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + thrown java.util.concurrent.ExecutionException + } + // resets config.environment and call determineEnvironment on newly created Context object void determineEnvironment(config, existingEnvironments, String branch) { config.environment = null diff --git a/test/groovy/org/ods/util/CollectionWithForLoopSpec.groovy b/test/groovy/org/ods/util/CollectionWithForLoopSpec.groovy new file mode 100644 index 000000000..4c36e806b --- /dev/null +++ b/test/groovy/org/ods/util/CollectionWithForLoopSpec.groovy @@ -0,0 +1,33 @@ +package org.ods.util + +import spock.lang.Specification + +class CollectionWithForLoopSpec extends Specification { + + def "findAll"() { + given: + def fruitNames = ["apples", "bananas"] + + when: + def withFor = CollectionWithForLoop.findAll(fruitNames, {it.length() > 6} ) + def collectionOriginal = fruitNames.findAll {it.length() > 6} + then: + withFor == ["bananas"] + withFor == collectionOriginal + } + + def "collectEntries"() { + given: + def fruits = [ [id: "apples"], [id: "bananas"] ] + + when: + def collectWithFor = CollectionWithForLoop.collectEntries(fruits, + { it.id }) {return "tastes yummy!"} + def collectOriginal= fruits.collectEntries { + [(it.id): "tastes yummy!"]} + + then: + collectWithFor == 
collectOriginal + collectWithFor == ["apples": "tastes yummy!", "bananas": "tastes yummy!"] + } +} diff --git a/test/groovy/org/ods/util/ShellWithRetrySpec.groovy b/test/groovy/org/ods/util/ShellWithRetrySpec.groovy new file mode 100644 index 000000000..01d3aa1e6 --- /dev/null +++ b/test/groovy/org/ods/util/ShellWithRetrySpec.groovy @@ -0,0 +1,60 @@ +package org.ods.util + +import org.ods.PipelineScript +import spock.lang.Specification + +class ShellWithRetrySpec extends Specification { + + def "execute"() { + given: + Map params = [ returnStdout: true, + script: "env | grep || true", + label: 'getting extension labels from current environment'] + PipelineScript script = Mock() + ILogger logger = Mock() + + when: + def shellReturn = new ShellWithRetry(script, logger).execute(params) + + then: + 1 * script.sh( params ) >> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * script.sh( params ) >> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * script.sh( params ) >> { return "ok"} + shellReturn == "ok" + } + + def "execute with exception thrown"() { + given: + Map params = [ returnStdout: true, + script: "env | grep || true", + label: 'getting extension labels from current environment'] + PipelineScript script = Mock() + ILogger logger = Mock() + + when: + new ShellWithRetry(script, logger).execute(params) + + then: + 1 * script.sh( params ) >> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * script.sh( params ) >> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * script.sh( params ) >> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * 
script.sh( params ) >> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + 1 * script.sh( params ) >> { throw new NotSerializableException("error")} + 1 * logger.warn{String it -> it.contains("WARN: Jenkins serialization issue")} + + thrown java.util.concurrent.ExecutionException + } + +}