fix(pipelines): expectedArtifacts inheritance from dynamic parent templates (#3023)

When expected artifacts are defined in a template that is sourced dynamically,
they are not added to the execution.

Expected artifacts can be defined in `TemplatedPipelineRequest`,
`TemplateConfiguration` and `PipelineTemplate` objects, so a merge strategy
is required. Since each expected artifact has a unique `id`, artifacts that
share the same `id` across these objects override one another:
`TemplatedPipelineRequest` has the highest priority, followed by
`TemplateConfiguration`, and finally `PipelineTemplate`.
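
To illustrate these semantics, below is a minimal, hypothetical Java sketch of the merge-by-`id` behaviour (the class name, helper method and artifact values are illustrative placeholders, not part of this commit):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Hypothetical sketch of the merge-by-id strategy described above (not the
// committed implementation): lower-priority sources are inserted first, so
// higher-priority sources overwrite entries that share the same id.
class ExpectedArtifactMergeSketch {

  @SafeVarargs
  static List<Map<String, Object>> merge(List<Map<String, Object>>... sourcesLowToHigh) {
    Map<String, Map<String, Object>> byId = new LinkedHashMap<>();
    for (List<Map<String, Object>> source : sourcesLowToHigh) {
      for (Map<String, Object> artifact : source) {
        byId.put(String.valueOf(artifact.get("id")), artifact);
      }
    }
    return new ArrayList<>(byId.values());
  }

  public static void main(String[] args) {
    // Placeholder artifacts; the id "helm-chart" and displayName values are illustrative only.
    List<Map<String, Object>> fromTemplate = List.of(Map.of("id", "helm-chart", "displayName", "from-template"));
    List<Map<String, Object>> fromConfiguration = List.of(Map.of("id", "helm-chart", "displayName", "from-configuration"));
    List<Map<String, Object>> fromRequest = List.of(Map.of("id", "helm-chart", "displayName", "from-request"));

    // PipelineTemplate has the lowest priority and TemplatedPipelineRequest the highest,
    // so the single surviving artifact carries displayName "from-request".
    System.out.println(merge(fromTemplate, fromConfiguration, fromRequest));
  }
}

With these inputs only one artifact with id `helm-chart` survives, and it is the one supplied by the request.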
xabierlaiseca authored and ezimanyi committed Jul 26, 2019
1 parent 04a037b commit 511bff2
Showing 27 changed files with 366 additions and 37 deletions.
@@ -100,34 +100,36 @@ class DependentPipelineStarter implements ApplicationContextAware {

def trigger = pipelineConfig.trigger
//keep the trigger as the preprocessor removes it.
def expectedArtifacts = pipelineConfig.expectedArtifacts

if (parentPipelineStageId != null) {
pipelineConfig.receivedArtifacts = artifactResolver?.getArtifacts(parentPipeline.stageById(parentPipelineStageId))
} else {
pipelineConfig.receivedArtifacts = artifactResolver?.getAllArtifacts(parentPipeline)
}

def artifactError = null
try {
artifactResolver?.resolveArtifacts(pipelineConfig)
} catch (Exception e) {
artifactError = e
}
// This is required for template source with jinja expressions
trigger.artifacts = pipelineConfig.receivedArtifacts

for (ExecutionPreprocessor preprocessor : executionPreprocessors.findAll {
it.supports(pipelineConfig, ExecutionPreprocessor.Type.PIPELINE)
}) {
pipelineConfig = preprocessor.process(pipelineConfig)
}

pipelineConfig.trigger = trigger

def artifactError = null

try {
artifactResolver?.resolveArtifacts(pipelineConfig)
} catch (Exception e) {
artifactError = e
}

if (pipelineConfig.errors != null) {
throw new ValidationException("Pipeline template is invalid", pipelineConfig.errors as List<Map<String, Object>>)
}

pipelineConfig.trigger = trigger
pipelineConfig.expectedArtifacts = expectedArtifacts

// Process the raw trigger to resolve any expressions before converting it to a Trigger object, which will not be
// processed by the contextParameterProcessor (it only handles Maps, Lists, and Strings)
Map processedTrigger = contextParameterProcessor.process([trigger: pipelineConfig.trigger], [trigger: pipelineConfig.trigger], false).trigger
@@ -584,4 +584,140 @@ class DependentPipelineStarterSpec extends Specification {
result.stages[0].refId == "wait1"
result.stages[0].name == "Wait for 5 seconds"
}

def "should trigger v1 templated pipelines with dynamic source using inherited expectedArtifacts"() {
given:
def triggeredPipelineConfig = [
id: "triggered",
type: "templatedPipeline",
config: [
configuration: [
inherit: ["expectedArtifacts", "triggers"]
],
pipeline: [
application: "covfefe",
name: "Templated pipeline",
template: [
source: "{% for artifact in trigger.artifacts %}{% if artifact.type == 'spinnaker-pac' && artifact.name == 'wait' %}{{ artifact.reference }}{% endif %}{% endfor %}"
]
],
schema: "1"
]
]
def triggeredPipelineTemplate = mapper.convertValue([
schema: "1",
id: "barebones",
configuration: [
expectedArtifacts: [[
defaultArtifact: [
customKind: true
],
id: "helm-chart",
displayName: "helm-chart",
matchArtifact: [
customKind: true,
type: "http/file",
name: "artifact-name"
],
useDefaultArtifact: false,
usePriorArtifact: false
]]
],
stages: [[
id: "bake-manifest",
type: "bakeManifest",
name: "Bake manifest",
config: [
templateRenderer: "HELM2",
inputArtifacts: [[
account: "my-account",
id: "helm-chart"
]],
expectedArtifacts: [[
id: "baked-manifest",
matchArtifact: [
kind: "base64",
name: "baked-manifest",
type: "embedded/base64"
],
useDefaultArtifact: false
]],
namespace: "a-namespace",
outputName: "baked-manifest"
]
]]

], PipelineTemplate)

Artifact testArtifact = new Artifact(
type: "http/file",
name: "artifact-name",
customKind: true,
reference: "a-reference"
)
def parentPipeline = pipeline {
name = "parent"
trigger = new DefaultTrigger("webhook", null, "test", [:], [testArtifact])
authentication = new Execution.AuthenticationDetails("parentUser", "acct1", "acct2")
}
def executionLauncher = Mock(ExecutionLauncher)
def templateLoader = Mock(TemplateLoader)
def applicationContext = new StaticApplicationContext()
def renderer = new JinjaRenderer(mapper, Mock(Front50Service), [])
def registry = new NoopRegistry()
def parameterProcessor = new ContextParameterProcessor()
def pipelineTemplatePreprocessor = new PipelineTemplatePreprocessor(
mapper,
new SchemaVersionHandler(
new V1SchemaHandlerGroup(
templateLoader,
renderer,
mapper,
registry),
Mock(V2SchemaHandlerGroup)),
new PipelineTemplateErrorHandler(),
registry)
applicationContext.beanFactory.registerSingleton("pipelineLauncher", executionLauncher)
dependentPipelineStarter = new DependentPipelineStarter(
applicationContext,
mapper,
parameterProcessor,
Optional.of([pipelineTemplatePreprocessor] as List<ExecutionPreprocessor>),
Optional.of(artifactResolver),
registry
)

and:
def execution
1 * executionLauncher.start(*_) >> {
execution = it[0]
execution = mapper.readValue(it[1], Map)
return pipeline {
JavaType type = mapper.getTypeFactory().constructCollectionType(List, Stage)
trigger = mapper.convertValue(execution.trigger, Trigger)
stages.addAll(mapper.convertValue(execution.stages, type))
}
}
1 * templateLoader.load(_ as TemplateConfiguration.TemplateSource) >> [triggeredPipelineTemplate]

artifactResolver.getArtifactsForPipelineId(*_) >> {
return new ArrayList<>()
}

when:
def result = dependentPipelineStarter.trigger(
triggeredPipelineConfig,
null /*user*/,
parentPipeline,
[:],
null,
buildAuthenticatedUser("user", [])
)

then:
execution.expectedArtifacts.size() == 1
execution.expectedArtifacts[0].id == "helm-chart"
result.trigger.resolvedExpectedArtifacts.size() == 1
result.trigger.resolvedExpectedArtifacts[0].id == "helm-chart"
}
}
@@ -16,7 +16,6 @@
package com.netflix.spinnaker.orca.pipelinetemplate;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact;
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.model.NamedHashMap;
import java.util.ArrayList;
import java.util.HashMap;
@@ -27,7 +26,7 @@ public class TemplatedPipelineRequest {
String id;
String schema;
String type;
List<ExpectedArtifact> expectedArtifacts;
List<Map<String, Object>> expectedArtifacts;
Map<String, Object> trigger = new HashMap<>();
Map<String, Object> config;
Map<String, Object> template;
@@ -130,11 +129,11 @@ public boolean isKeepWaitingPipelines() {
return keepWaitingPipelines;
}

public void setExpectedArtifacts(List<ExpectedArtifact> expectedArtifacts) {
public void setExpectedArtifacts(List<Map<String, Object>> expectedArtifacts) {
this.expectedArtifacts = expectedArtifacts;
}

public List<ExpectedArtifact> getExpectedArtifacts() {
public List<Map<String, Object>> getExpectedArtifacts() {
return this.expectedArtifacts;
}
}
@@ -68,6 +68,7 @@ public Map<String, Object> generate(
addNotifications(pipeline, template, configuration);
addParameters(pipeline, template, configuration);
addTriggers(pipeline, template, configuration);
addExpectedArtifacts(pipeline, template, configuration, request);

pipeline.put(
"stages",
@@ -95,9 +96,6 @@ public Map<String, Object> generate(
if (request.getTrigger() != null && !request.getTrigger().isEmpty()) {
pipeline.put("trigger", request.getTrigger());
}
if (request.getExpectedArtifacts() != null) {
pipeline.put("expectedArtifacts", request.getExpectedArtifacts());
}

return pipeline;
}
@@ -155,4 +153,39 @@ private void addTriggers(
.orElse(Collections.emptyList()));
}
}

private void addExpectedArtifacts(
Map<String, Object> pipeline,
PipelineTemplate template,
TemplateConfiguration configuration,
TemplatedPipelineRequest request) {

List<Map<String, Object>> mergedExpectedArtifacts =
mergeExpectedArtifact(
emptyListIfNull(configuration.getConfiguration().getExpectedArtifacts()),
emptyListIfNull(request.getExpectedArtifacts()));

if (configuration.getConfiguration().getInherit().contains("expectedArtifacts")) {
mergedExpectedArtifacts =
mergeExpectedArtifact(
emptyListIfNull(template.getConfiguration().getExpectedArtifacts()),
mergedExpectedArtifacts);
}

pipeline.put("expectedArtifacts", mergedExpectedArtifacts);
}

private List<? extends Map<String, Object>> emptyListIfNull(
List<? extends Map<String, Object>> eas) {
return Optional.ofNullable(eas).orElse(Collections.emptyList());
}

private List<Map<String, Object>> mergeExpectedArtifact(
List<? extends Map<String, Object>> eas1, List<? extends Map<String, Object>> eas2) {
HashMap<String, Map<String, Object>> mergedByName = new HashMap<>();

eas1.forEach(artifact -> mergedByName.put(String.valueOf(artifact.get("id")), artifact));
eas2.forEach(artifact -> mergedByName.put(String.valueOf(artifact.get("id")), artifact));
return new ArrayList<>(mergedByName.values());
}
}
@@ -177,7 +177,8 @@ class PipelineTemplatePipelinePreprocessorSpec extends Specification {
job: "job",
buildNumber: 1111
],
triggers: []
triggers: [],
expectedArtifacts: []
]
assertReflectionEquals(expected, result, ReflectionComparatorMode.IGNORE_DEFAULTS)
}
@@ -17,6 +17,7 @@ package com.netflix.spinnaker.orca.pipelinetemplate.v1schema

import com.netflix.spinnaker.orca.pipelinetemplate.TemplatedPipelineRequest
import com.netflix.spinnaker.orca.pipelinetemplate.generator.ExecutionGenerator

import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.model.NamedHashMap
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.model.PipelineTemplate
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.model.PipelineTemplate.Configuration
@@ -114,6 +115,79 @@ class V1SchemaExecutionGeneratorSpec extends Specification {
[] || ['email-from-configuration@spinnaker.io']
}

@Unroll
def "should set expected artifacts in execution json"() {
given:
PipelineTemplate template = getPipelineTemplate()
template.configuration.setExpectedArtifacts([createExpectedArtifact('artifact-from-template')])

TemplateConfiguration configuration = new TemplateConfiguration(
pipeline: new PipelineDefinition([application: 'orca', pipelineConfigId: 'pipelineConfigId']),
configuration: new TemplateConfiguration.PipelineConfiguration(
inherit: inherit,
expectedArtifacts: [
createExpectedArtifact('artifact-from-configuration')
]
)
)

when:
def request = new TemplatedPipelineRequest(id: "pipelineConfigId")
if (artifactInRequest) {
request.setExpectedArtifacts([createExpectedArtifact('artifact-from-request')])
}
def result = subject.generate(template, configuration, request)


then:
result.expectedArtifacts*.id == expectedArtifactIds

where:
inherit | artifactInRequest || expectedArtifactIds
['expectedArtifacts'] | false || ['artifact-from-template', 'artifact-from-configuration']
[] | false || ['artifact-from-configuration']
[] | true || ['artifact-from-request', 'artifact-from-configuration']
['expectedArtifacts'] | true || ['artifact-from-template', 'artifact-from-request', 'artifact-from-configuration']
}

@Unroll
def "should override expected artifacts in execution json"() {
given:
PipelineTemplate template = getPipelineTemplate()
if (artifactInTemplate) {
template.configuration.setExpectedArtifacts([createExpectedArtifact('artifact', 'from-template')])
}

TemplateConfiguration configuration = new TemplateConfiguration(
pipeline: new PipelineDefinition([application: 'orca', pipelineConfigId: 'pipelineConfigId']),
configuration: new TemplateConfiguration.PipelineConfiguration(
inherit: ['expectedArtifacts']
)
)

if (artifactInConfiguration) {
configuration.getConfiguration().setExpectedArtifacts([createExpectedArtifact('artifact', 'from-configuration')])
}

when:
def request = new TemplatedPipelineRequest(id: "pipelineConfigId")
if (artifactInRequest) {
request.setExpectedArtifacts([createExpectedArtifact('artifact', 'from-request')])
}
def result = subject.generate(template, configuration, request)


then:
result.expectedArtifacts*.displayName == expectedArtifactNames

where:
artifactInTemplate | artifactInConfiguration | artifactInRequest || expectedArtifactNames
true | true | true || ['from-request']
true | true | false || ['from-configuration']
true | false | true || ['from-request']
false | true | true || ['from-request']
}

private PipelineTemplate getPipelineTemplate() {
new PipelineTemplate(
id: 'simpleTemplate',
@@ -162,4 +236,16 @@ class V1SchemaExecutionGeneratorSpec extends Specification {
]
)
}

private HashMap<String, Object> createExpectedArtifact(String id, String name = null) {
def effectiveName = name ?: id
return [
id: id,
displayName: effectiveName,
defaultArtifact: [customKind: true],
matchArtifact: [customKind: true, type: 'http/file', name: effectiveName],
useDefaultArtifact: false,
usePriorArtifact: false
]
}
}