Fix/extract enums2 (#900) #902

Merged 4 commits on May 9, 2022.

13 changes: 9 additions & 4 deletions .github/workflows/gradle.yml
@@ -9,10 +9,15 @@ jobs:
- name: Checkout
uses: actions/checkout@v2.0.0

- name: check the value of github.workspace and runner.temp
run: |
echo "github.workspace = ${{ github.workspace }}"
echo "runner.temp = ${{ runner.temp }}"

- name: Set up OpenJDK 11
uses: actions/setup-java@v2
uses: actions/setup-java@v3
with:
distribution: 'adopt'
distribution: 'temurin'
java-version: '11'
check-latest: true

@@ -25,7 +30,7 @@ jobs:
${{ runner.os }}-gradle-

- name: Build with Gradle
run: ./gradlew clean build
run: ./gradlew -Djava.io.tmpdir=${{ runner.temp }} clean build
env:
NO_NEXUS: true

@@ -44,7 +49,7 @@

- name: Test Report
uses: actions/upload-artifact@v2
if: ${{ failure() }}
if: ${{ always() }}
with:
name: Test Report
path: build/reports/tests/test/
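
Taken together, the workflow changes above echo github.workspace and runner.temp for debugging, move the Java setup to actions/setup-java@v3 with the Temurin distribution, point the JVM temp directory used by the Gradle build at the runner's temp folder, and upload the test report on every run rather than only on failure. The following standalone Groovy sketch (not part of the PR) illustrates what -Djava.io.tmpdir=${{ runner.temp }} changes: any code in the build or its tests that asks the JVM for a temporary file now writes under runner.temp, which GitHub-hosted runners empty between jobs.

// Standalone sketch, not library code: the effect of overriding java.io.tmpdir.
import java.nio.file.Files

def tmpDir = System.getProperty('java.io.tmpdir')
println "java.io.tmpdir = ${tmpDir}"

// File.createTempFile and Files.createTempFile both honour java.io.tmpdir,
// so with the new Gradle flag this lands under the runner's temp folder.
def scratch = Files.createTempFile('ods-build-', '.tmp')
println "temp file created at ${scratch}"
assert scratch.toString().startsWith(new File(tmpDir).path)
Files.delete(scratch)
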
2 changes: 2 additions & 0 deletions .gitignore
@@ -11,6 +11,8 @@
/.project
/gradle.properties

/lib/*.jar

.idea
*.iws
*.iml
2 changes: 1 addition & 1 deletion CHANGELOG.md
@@ -82,7 +82,7 @@
- Add new Infrastructure Service ([#740](https://github.com/opendevstack/ods-jenkins-shared-library/pull/740))
- Add annotation to agents by default ([#879](https://github.com/opendevstack/ods-jenkins-shared-library/issues/879))
- Fixed uploading of artifacts to 'pypi' repositories ([#785](https://github.com/opendevstack/ods-jenkins-shared-library/issues/785))

- Improve memory cleanUp ([#902](https://github.com/opendevstack/ods-jenkins-shared-library/pull/902))

## [3.0] - 2020-08-11

1 change: 0 additions & 1 deletion build.gradle
@@ -6,7 +6,6 @@ plugins {

group "org.ods"
version = '0.0.1-SNAPSHOT'
sourceCompatibility = 1.8

ext {
nexus_url = project.findProperty('nexus_url') ?: System.getenv('NEXUS_URL') ?: System.getenv('NEXUS_HOST')
2 changes: 1 addition & 1 deletion gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.4.1-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
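
Alongside the wrapper bump from Gradle 6.4.1 to 7.4.2, build.gradle drops the explicit sourceCompatibility = 1.8 while CI builds on JDK 11. A hypothetical Groovy DSL fragment, not part of this PR, showing how the Java level could still be pinned explicitly under Gradle 7 via the toolchain API if that were ever needed:

// Hypothetical build.gradle fragment; assumes the java/groovy plugin is applied.
java {
    toolchain {
        // Pin compilation and test execution to a Java 11 toolchain.
        languageVersion = JavaLanguageVersion.of(11)
    }
}
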
44 changes: 31 additions & 13 deletions src/org/ods/component/Pipeline.groovy
@@ -1,16 +1,16 @@
package org.ods.component

import org.ods.services.GitService
import groovy.json.JsonOutput
import org.ods.services.BitbucketService
import org.ods.services.GitService
import org.ods.services.JenkinsService
import org.ods.services.NexusService
import org.ods.services.OpenShiftService
import org.ods.services.ServiceRegistry
import org.ods.util.GitCredentialStore
import org.ods.util.ILogger
import org.ods.util.IPipelineSteps
import org.ods.util.PipelineSteps
import org.ods.services.JenkinsService
import org.ods.services.NexusService
import groovy.json.JsonOutput

class Pipeline implements Serializable {

@@ -19,9 +19,9 @@ class Pipeline implements Serializable {
private JenkinsService jenkinsService
private BitbucketService bitbucketService

private final ILogger logger
private final def script
private final IPipelineSteps steps
private ILogger logger
private def script
private IPipelineSteps steps
private IContext context
private boolean notifyNotGreen = true
private boolean ciSkipEnabled = true
@@ -98,7 +98,7 @@ class Pipeline implements Serializable {
if (config.image?.startsWith(wtfEnvBug)) {
config.image = config.image.
replace(wtfEnvBug, "${defaultDockerRegistry}/")
logger.warn ("Patched image via master env to: ${config.image}")
logger.warn("Patched image via master env to: ${config.image}")
}

context.assemble()
@@ -247,7 +247,7 @@
}
stages(context)
if (context.commitGitWorkingTree) {
gitService.commit ([], "system-commit ods, [ci skip]", true)
gitService.commit([], "system-commit ods, [ci skip]", true)
gitService.pushRef(context.gitBranch)
}
}
@@ -286,15 +286,33 @@
}
logger.debugClocked("${config.componentId}",
"ODS Component Pipeline '${context.componentId}-${context.buildNumber}'\r" +
"ODS Build Artifacts '${context.componentId}': " +
"\r${JsonOutput.prettyPrint(JsonOutput.toJson(context.getBuildArtifactURIs()))}"
"ODS Build Artifacts '${context.componentId}': " +
"\r${JsonOutput.prettyPrint(JsonOutput.toJson(context.getBuildArtifactURIs()))}"
)
if (!!!script.env.MULTI_REPO_BUILD) {
cleanUp()
}
}
}
}
}
}

private void cleanUp() {
logger.debug('-- SHUTTING DOWN RM (..) --')
logger.resetStopwatch()
this.script = null
this.steps = null
this.logger = null

this.gitService = null
this.openShiftService = null
this.jenkinsService = null
this.bitbucketService = null

ServiceRegistry.removeInstance()
}

def setupForMultiRepoBuild(def config) {
logger.info '-> Detected multirepo orchestration pipeline build'
config.localCheckoutEnabled = false
@@ -400,8 +418,8 @@
config.annotations = [
script.podAnnotation(
key: 'cluster-autoscaler.kubernetes.io/safe-to-evict', value: 'false'
)
]
)
]
}
}

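
The Pipeline.groovy changes carry the memory clean-up referenced in the changelog: the script, steps, logger and service fields lose their final modifier so that the new cleanUp() method can null them once a component build finishes, and ServiceRegistry.removeInstance() is called to drop the registry as well. The guard !!!script.env.MULTI_REPO_BUILD restricts this to standalone component builds; inside an orchestration (multi-repo) build the services are still needed. A standalone sketch of that Groovy truthiness idiom, not library code:

// !!x coerces a value to boolean via Groovy truth, so !!!x is true when x is
// null, empty or otherwise falsy.
def shouldCleanUp = { env -> !!!env.MULTI_REPO_BUILD }

assert shouldCleanUp([:])                          // variable not set: clean up
assert shouldCleanUp([MULTI_REPO_BUILD: ''])       // empty value: clean up
assert !shouldCleanUp([MULTI_REPO_BUILD: 'true'])  // orchestration build: keep services
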
134 changes: 67 additions & 67 deletions src/org/ods/openshift/OpenShiftResourceMetadata.groovy
@@ -13,9 +13,9 @@ import org.ods.util.IPipelineSteps
* @See <a href="https://github.com/gorkem/app-labels/blob/master/labels-annotation-for-openshift.adoc" >
* Guidelines for Labels and Annotations for OpenShift applications</a>
* @See <a href="https://helm.sh/docs/chart_best_practices/labels/" > Helm: Labels and Annotations</a>
*
*/
class OpenShiftResourceMetadata {

// Standard roles recognized by OpenShift. Arbitrary roles are supported.
static final ROLE_FRONTEND = 'frontend'
static final ROLE_BACKEND = 'backend'
@@ -173,6 +173,71 @@ class OpenShiftResourceMetadata {
setMetadata(metadata, pauseRollouts, deployments)
}

/**
* Sanitize all metadata values to make sure they are valid label values.
* Valid label values must be 63 characters or less and must be empty
* or begin and end with an alphanumeric character ([a-z0-9A-Z])
* with dashes (-), underscores (_), dots (.), and alphanumerics between.
* If an illegal value is found for an entry that allows modifications, the value will be sanitized as follows:
* 1. Any non-alphanumeric characters will be removed from the beginning of the value.
* 2. If it's longer than 63 characters, the trailing characters after the 63rd will be removed.
* 3. Any non-alphanumeric characters will be removed from the end of the value.
* 4. Every remaining illegal character will be replaced with an underscore.
*
* NOTE: If, after step 1, the value is empty, an exception will be raised
* instead of silently assigning an empty value. This situation should be rare, only for non-empty values
* consisting only of non-alphanumeric characters.
*
* If an illegal value is found for an entry that does not allow modifications,
* an exception with an informative message will be raised, thus ending the labelling process.
*
* All values are converted to strings using the <code>toString()</code> method.
*
* @param metadata a <Map> with the metadata entries to validate.
* @return the metadata with <code>String</code>, possibly sanitized, values.
* @throws IllegalArgumentException if an illegal value is found for an entry that does not allow modifications
* or a value is found that consists entirely of non-alphanumeric characters.
*/
private static sanitizeValues(metadata) {
return (Map<String, String>) metadata.collectEntries { key, value ->
if (value == null) {
return [(key): null]
}
def stringValue = value.toString()
if (!stringValue) {
return [(key): stringValue]
}
def sanitizedValue = stringValue
def end = sanitizedValue.length()
def i = 0
while (i < end && !Character.isLetterOrDigit(sanitizedValue.charAt(i))) {
i++
}
if (i == end) {
throw new IllegalArgumentException('Metadata entries must not entirely consist of ' +
"non-alphanumeric characters. Please, check the metadata.yml file: ${key}=${value}")
}
// Now the value is warranted to contain, at least, one alphanumeric character, at position i.
def j = Math.min(end, i + 63)
// No guard needed.
while (!Character.isLetterOrDigit(sanitizedValue.charAt(j - 1))) {
j--
}
if (i > 0 || j < end) {
sanitizedValue = sanitizedValue.subSequence(i, j)
}
def matcher = sanitizedValue =~ LABEL_VALUE_PATTERN
sanitizedValue = matcher.replaceAll('_')
if (sanitizedValue != stringValue && strictEntries.contains(key)) {
throw new IllegalArgumentException('Illegal value for metadata entry. ' +
'Values must be 63 characters or less, begin and end with an alphanumeric character and ' +
"contain only alphanumerics, '-', '_' and '.'. Please, check the metadata.yml file: " +
"${key}=${value}")
}
return [(key): sanitizedValue]
}
}

/**
* Retrieves metadata for the component.
* All metadata values are warranted to be valid strings to be used as label values.
@@ -274,7 +339,7 @@ class OpenShiftResourceMetadata {
metadata.putAll([
systemName: steps.env?.BUILD_PARAM_CONFIGITEM,
projectVersion: steps.env?.BUILD_PARAM_CHANGEID,
workInProgress: steps.env?.BUILD_PARAM_VERSION == 'WIP'
workInProgress: steps.env?.BUILD_PARAM_VERSION == 'WIP',
])
} else {
// For the moment, we don't allow the users to customize these labels
@@ -288,71 +353,6 @@
return metadata
}

/**
* Sanitize all metadata values to make sure they are valid label values.
* Valid label values must be 63 characters or less and must be empty
* or begin and end with an alphanumeric character ([a-z0-9A-Z])
* with dashes (-), underscores (_), dots (.), and alphanumerics between.
* If an illegal value is found for an entry that allows modifications, the value will be sanitized as follows:
* 1. Any non-alphanumeric characters will be removed from the beginning of the value.
* 2. If it's longer than 63 characters, the trailing characters after the 63rd will be removed.
* 3. Any non-alphanumeric characters will be removed from the end of the value.
* 4. Every remaining illegal character will be replaced with an underscore.
*
* NOTE: If, after step 1, the value is empty, an exception will be raised
* instead of silently assigning an empty value. This situation should be rare, only for non-empty values
* consisting only of non-alphanumeric characters.
*
* If an illegal value is found for an entry that does not allow modifications,
* an exception with an informative message will be raised, thus ending the labelling process.
*
* All values are converted to strings using the <code>toString()</code> method.
*
* @param metadata a <Map> with the metadata entries to validate.
* @return the metadata with <code>String</code>, possibly sanitized, values.
* @throws IllegalArgumentException if an illegal value is found for an entry that does not allow modifications
* or a value is found that consists entirely of non-alphanumeric characters.
*/
private static sanitizeValues(metadata) {
return (Map<String, String>) metadata.collectEntries { key, value ->
if (value == null) {
return [(key): null]
}
def stringValue = value.toString()
if (!stringValue) {
return [(key): stringValue]
}
def sanitizedValue = stringValue
def end = sanitizedValue.length()
def i = 0
while (i < end && !Character.isLetterOrDigit(sanitizedValue.charAt(i))) {
i++
}
if (i == end) {
throw new IllegalArgumentException('Metadata entries must not entirely consist of ' +
"non-alphanumeric characters. Please, check the metadata.yml file: ${key}=${value}")
}
// Now the value is warranted to contain, at least, one alphanumeric character, at position i.
def j = Math.min(end, i + 63)
// No guard needed.
while (!Character.isLetterOrDigit(sanitizedValue.charAt(j - 1))) {
j--
}
if (i > 0 || j < end) {
sanitizedValue = sanitizedValue.subSequence(i, j)
}
def matcher = sanitizedValue =~ LABEL_VALUE_PATTERN
sanitizedValue = matcher.replaceAll('_')
if (sanitizedValue != stringValue && strictEntries.contains(key)) {
throw new IllegalArgumentException('Illegal value for metadata entry. ' +
'Values must be 63 characters or less, begin and end with an alphanumeric character and ' +
"contain only alphanumerics, '-', '_' and '.'. Please, check the metadata.yml file: " +
"${key}=${value}")
}
return [(key): sanitizedValue]
}
}

/**
* Builds a map with the labels to be set or unset, based on the given metadata.
* The map keys and values are the corresponding label keys and values.
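
The large diff in OpenShiftResourceMetadata.groovy is a relocation: sanitizeValues() appears once as added and once as removed, with an identical body, so only its position in the class changes. Its contract is the one spelled out in the Javadoc: label values must be at most 63 characters, begin and end with an alphanumeric character, and otherwise contain only alphanumerics, '-', '_' and '.'. The following simplified Groovy re-implementation of the four sanitization steps is illustrative only; the real method additionally rejects changes to strict entries and relies on the class's LABEL_VALUE_PATTERN constant, which is not shown in this diff.

// Simplified, hypothetical re-implementation of the documented sanitization
// rules; not the library method.
String sanitizeLabelValue(String value) {
    if (value == null || value.isEmpty()) {
        return value
    }
    def s = value.replaceFirst(/^[^a-zA-Z0-9]+/, '')   // 1. trim leading non-alphanumerics
    if (s.isEmpty()) {
        throw new IllegalArgumentException("No alphanumeric characters in: ${value}")
    }
    if (s.length() > 63) {
        s = s.substring(0, 63)                         // 2. cap at 63 characters
    }
    s = s.replaceFirst(/[^a-zA-Z0-9]+$/, '')           // 3. trim trailing non-alphanumerics
    return s.replaceAll(/[^-a-zA-Z0-9_.]/, '_')        // 4. replace the rest with underscores
}

assert sanitizeLabelValue('--my value!') == 'my_value'
assert sanitizeLabelValue('release/1.0') == 'release_1.0'
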
19 changes: 10 additions & 9 deletions src/org/ods/orchestration/BuildStage.groovy
@@ -3,6 +3,7 @@ package org.ods.orchestration
import org.ods.orchestration.scheduler.LeVADocumentScheduler
import org.ods.orchestration.usecase.JiraUseCase
import org.ods.orchestration.util.MROPipelineUtil
import org.ods.orchestration.util.PipelinePhaseLifecycleStage
import org.ods.orchestration.util.Project
import org.ods.services.ServiceRegistry
import org.ods.util.PipelineSteps
@@ -28,7 +29,7 @@ class BuildStage extends Stage {
def phase = MROPipelineUtil.PipelinePhases.BUILD

def preExecuteRepo = { steps_, repo ->
levaDocScheduler.run(phase, MROPipelineUtil.PipelinePhaseLifecycleStage.PRE_EXECUTE_REPO, repo)
levaDocScheduler.run(phase, PipelinePhaseLifecycleStage.PRE_EXECUTE_REPO, repo)
}

def postExecuteRepo = { steps_, repo ->
@@ -40,7 +41,11 @@
&& repo.type?.toLowerCase() == MROPipelineUtil.PipelineConfig.REPO_TYPE_ODS_CODE) {
def data = [ : ]
def resultsResurrected = !!repo.data.openshift.resurrectedBuild
if (!resultsResurrected) {
if (resultsResurrected) {
logger.info("[${repo.id}] Resurrected tests from run " +
"${repo.data.openshift.resurrectedBuild} " +
"- no unit tests results will be reported")
} else {
data << [tests: [unit: getTestResults(steps, repo) ]]
jira.reportTestResultsForComponent(
"Technology-${repo.id}",
@@ -51,16 +56,12 @@
// return immediately when no jira adapter is configured).
// this will set failedTests if any xunit tests have failed
util.warnBuildIfTestResultsContainFailure(data.tests.unit.testResults)
} else {
logger.info("[${repo.id}] Resurrected tests from run " +
"${repo.data.openshift.resurrectedBuild} " +
"- no unit tests results will be reported")
}

logger.info("levaDocScheduler.run start")
levaDocScheduler.run(
phase,
MROPipelineUtil.PipelinePhaseLifecycleStage.POST_EXECUTE_REPO,
PipelinePhaseLifecycleStage.POST_EXECUTE_REPO,
repo,
data
)
@@ -69,7 +70,7 @@
}

Closure generateDocuments = {
levaDocScheduler.run(phase, MROPipelineUtil.PipelinePhaseLifecycleStage.POST_START)
levaDocScheduler.run(phase, PipelinePhaseLifecycleStage.POST_START)
}

// Execute phase for each repository
Expand All @@ -82,7 +83,7 @@ class BuildStage extends Stage {
}
}
executeInParallel(executeRepos, generateDocuments)
levaDocScheduler.run(phase, MROPipelineUtil.PipelinePhaseLifecycleStage.PRE_END)
levaDocScheduler.run(phase, PipelinePhaseLifecycleStage.PRE_END)

// in case of WIP we fail AFTER all pieces have been executed - so we can report as many
// failed unit tests as possible
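
The BuildStage.groovy changes are the ones behind the PR title: PipelinePhaseLifecycleStage is now imported from org.ods.orchestration.util directly rather than referenced through MROPipelineUtil, and the resurrected-build conditional is inverted so the short logging branch comes first. A sketch of what the extracted enum presumably looks like; only the four constants used in this diff are shown, and the real type may define more:

// Hypothetical shape of the extracted enum, inferred from the constants
// referenced in this diff.
package org.ods.orchestration.util

enum PipelinePhaseLifecycleStage {
    POST_START,
    PRE_EXECUTE_REPO,
    POST_EXECUTE_REPO,
    PRE_END
}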