fix(bake): Lookup artifact details from all upstream stages (#3011)
* fix(bake): Lookup artifact details from all upstream stages

Imagine the following setup:
```
Pipeline A
 -> Jenkins job (produces foo.*.deb)
 -> Run Pipeline B
    -> Jenkins job (produces bar.*.deb)
    -> Bake package foo
```

In this case, the package lookup for the bake stage will fail to look up the artifact
details for `foo.deb` to pass to the bakery, so the bakery is free to pick the
latest artifact matching the name, which can be the wrong artifact.

This change allows the package lookup to traverse up into parent pipeline stages,
similar to the `FindImage` tasks.
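
As a rough sketch of what this enables, the helper below is illustrative only (not code from this commit); it assumes the `Stage.findAncestor(Predicate)` API added in the diff below and the usual `com.netflix.spinnaker.orca.pipeline.model` package path:

```java
import com.netflix.spinnaker.orca.pipeline.model.Stage;

import java.util.Collections;
import java.util.Map;

class UpstreamBuildInfoSketch {
  /**
   * Illustrative helper: walk upstream from the given stage (requisite stages, synthetic
   * parents, and finally the parent pipeline's stages) and return the first "buildInfo"
   * output found, or an empty map if no upstream stage produced one.
   */
  @SuppressWarnings("unchecked")
  static Map<String, Object> findUpstreamBuildInfo(Stage stage) {
    Stage producer = stage.findAncestor(s -> s.getOutputs().containsKey("buildInfo"));
    return producer != null
        ? (Map<String, Object>) producer.getOutputs().get("buildInfo")
        : Collections.emptyMap();
  }
}
```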
marchello2000 committed Jul 2, 2019
1 parent 60ed2ad commit e4a8d3f
Showing 5 changed files with 272 additions and 65 deletions.
```diff
@@ -49,7 +49,7 @@ class EcsServerGroupCreator implements ServerGroupCreator, DeploymentDetailsAware
     if (imageDescription.fromContext) {
       if (stage.execution.type == ExecutionType.ORCHESTRATION) {
         // Use image from specific "find image from tags" stage
-        def imageStage = getAncestors(stage, stage.execution).find {
+        def imageStage = stage.ancestorsWithParentPipelines().find {
           it.refId == imageDescription.stageId && it.context.containsKey("amiDetails")
         }

```
```diff
@@ -62,15 +62,17 @@ trait DeploymentDetailsAware {
       return null
     }

-    return getAncestors(stage, stage.execution).find {
+    Stage ancestorWithImage = stage.findAncestor({
       def regions = (it.context.region ? [it.context.region] : it.context.regions) as Set<String>
       def cloudProviderFromContext = it.context.cloudProvider ?: it.context.cloudProviderType
       boolean hasTargetCloudProvider = !cloudProviderFromContext || targetCloudProvider == cloudProviderFromContext
       boolean hasTargetRegion = !targetRegion || regions?.contains(targetRegion) || regions?.contains("global")
       boolean hasImage = it.context.containsKey("ami") || it.context.containsKey("amiDetails")

       return hasImage && hasTargetRegion && hasTargetCloudProvider
-    }
+    })
+
+    return ancestorWithImage
   }

   List<Execution> getPipelineExecutions(Execution execution) {
@@ -88,53 +90,6 @@ trait DeploymentDetailsAware {
     return true
   }

-  List<Stage> getAncestors(Stage stage, Execution execution) {
-    if (stage?.requisiteStageRefIds) {
-      def previousStages = execution.stages.findAll {
-        it.refId in stage.requisiteStageRefIds
-
-      }
-      def syntheticStages = execution.stages.findAll {
-        it.parentStageId in previousStages*.id
-      }
-      return (previousStages + syntheticStages) + previousStages.collect { getAncestors(it, execution ) }.flatten()
-    } else if (stage?.parentStageId) {
-      def parent = execution.stages.find { it.id == stage.parentStageId }
-      return ([parent] + getAncestors(parent, execution)).flatten()
-    } else if (execution.type == PIPELINE) {
-      def parentPipelineExecution = getParentPipelineExecution(execution)
-
-      if (parentPipelineExecution) {
-        String parentPipelineStageId = (execution.trigger as PipelineTrigger).parentPipelineStageId
-        Stage parentPipelineStage = parentPipelineExecution.stages?.find {
-          it.type == "pipeline" && it.id == parentPipelineStageId
-        }
-
-        if (parentPipelineStage) {
-          return getAncestors(parentPipelineStage, parentPipelineExecution)
-        } else {
-          List<Stage> parentPipelineStages = parentPipelineExecution.stages?.collect()?.sort {
-            a, b -> b.endTime <=> a.endTime
-          }
-
-          if (parentPipelineStages) {
-            // The list is sorted in reverse order by endTime.
-            Stage firstStage = parentPipelineStages.last()
-
-            return parentPipelineStages + getAncestors(firstStage, parentPipelineExecution)
-          } else {
-            // Parent pipeline has no stages.
-            return getAncestors(null, parentPipelineExecution)
-          }
-        }
-      }
-
-      return []
-    } else {
-      return []
-    }
-  }
-
   private Execution getParentPipelineExecution(Execution execution) {
     // The initial stage execution is a Pipeline, and the ancestor executions are Maps.
     if (execution.type == PIPELINE && execution.trigger instanceof PipelineTrigger) {
```
```diff
@@ -30,6 +30,7 @@
 import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.fasterxml.jackson.databind.node.TreeTraversingParser;
+import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Sets;
@@ -40,6 +41,7 @@
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.*;
+import java.util.function.Predicate;
 import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
@@ -402,6 +404,122 @@ private List<Stage> ancestorsOnly(Set<String> visited) {
     }
   }

+  /**
+   * Finds the first ancestor stage, including stages of parent pipelines, that matches the predicate.
+   *
+   * @return the matching ancestor stage, or null if none is found
+   */
+  public Stage findAncestor(Predicate<Stage> predicate) {
+    return Stage.findAncestor(this, this.execution, predicate);
+  }
+
+  private static Stage findAncestor(Stage stage, Execution execution, Predicate<Stage> predicate) {
+    Stage matchingStage = null;
+
+    if (stage != null && !stage.getRequisiteStageRefIds().isEmpty()) {
+      List<Stage> previousStages =
+          execution.getStages().stream()
+              .filter(s -> stage.getRequisiteStageRefIds().contains(s.getRefId()))
+              .collect(toList());
+
+      Set<String> previousStageIds =
+          new HashSet<>(previousStages.stream().map(Stage::getId).collect(toList()));
+      List<Stage> syntheticStages =
+          execution.getStages().stream()
+              .filter(s -> previousStageIds.contains(s.getParentStageId()))
+              .collect(toList());
+
+      List<Stage> priorStages = new ArrayList<>();
+      priorStages.addAll(previousStages);
+      priorStages.addAll(syntheticStages);
+
+      matchingStage = priorStages.stream().filter(predicate).findFirst().orElse(null);
+
+      if (matchingStage == null) {
+        for (Stage s : previousStages) {
+          matchingStage = findAncestor(s, execution, predicate);
+
+          if (matchingStage != null) {
+            break;
+          }
+        }
+      }
+    } else if ((stage != null) && !Strings.isNullOrEmpty(stage.getParentStageId())) {
+      Optional<Stage> parent =
+          execution.getStages().stream()
+              .filter(s -> s.getId().equals(stage.getParentStageId()))
+              .findFirst();
+
+      if (!parent.isPresent()) {
+        throw new IllegalStateException(
+            "Couldn't find parent of stage "
+                + stage.getId()
+                + " with parent "
+                + stage.getParentStageId());
+      }
+
+      if (predicate.test(parent.get())) {
+        matchingStage = parent.get();
+      } else {
+        matchingStage = findAncestor(parent.get(), execution, predicate);
+      }
+    } else if ((execution.getType() == PIPELINE)
+        && (execution.getTrigger() instanceof PipelineTrigger)) {
+      PipelineTrigger parentTrigger = (PipelineTrigger) execution.getTrigger();
+
+      Execution parentPipelineExecution = parentTrigger.getParentExecution();
+      String parentPipelineStageId = parentTrigger.getParentPipelineStageId();
+
+      Optional<Stage> parentPipelineStage =
+          parentPipelineExecution.getStages().stream()
+              .filter(
+                  s -> s.getType().equals("pipeline") && s.getId().equals(parentPipelineStageId))
+              .findFirst();
+
+      if (parentPipelineStage.isPresent()) {
+        matchingStage = findAncestor(parentPipelineStage.get(), parentPipelineExecution, predicate);
+      } else {
+        List<Stage> parentPipelineStages = new ArrayList<>(parentPipelineExecution.getStages());
+        parentPipelineStages.sort(
+            (s1, s2) -> {
+              if ((s1.endTime == null) && (s2.endTime == null)) {
+                return 0;
+              }
+
+              if (s1.endTime == null) {
+                return 1;
+              }
+
+              if (s2.endTime == null) {
+                return -1;
+              }
+
+              return s1.endTime.compareTo(s2.endTime);
+            });
+
+        if (parentPipelineStages.size() > 0) {
+          // The list is sorted by endTime in ascending order (stages without an endTime last).
+          matchingStage = parentPipelineStages.stream().filter(predicate).findFirst().orElse(null);
+
+          if (matchingStage == null) {
+            Stage firstStage = parentPipelineStages.get(0);
+
+            if (predicate.test(firstStage)) {
+              matchingStage = firstStage;
+            } else {
+              matchingStage = findAncestor(firstStage, parentPipelineExecution, predicate);
+            }
+          }
+        } else {
+          // Parent pipeline has no stages.
+          matchingStage = findAncestor(null, parentPipelineExecution, predicate);
+        }
+      }
+    }
+
+    return matchingStage;
+  }
+
   /** Recursively get all stages that are children of the current one */
   public List<Stage> allDownstreamStages() {
     List<Stage> children = new ArrayList<>();
```
```diff
@@ -31,9 +31,9 @@
 import java.util.regex.Pattern;

 /**
- * This class inspects the context of a stage, preceding stages, the trigger, and possibly the
- * parent pipeline in order to see if an artifact matching the name(s) specified in the bake stage
- * was produced. If so, that version will be used in the bake request. If nothing is found after all
+ * This class inspects the context of a stage, preceding stages, the trigger, and the parent
+ * pipeline in order to see if an artifact matching the name(s) specified in the bake stage was
+ * produced. If so, that version will be used in the bake request. If nothing is found after all
  * this searching it is up to the bakery to pull the latest package version.
  *
  * <p>Artifact information comes from Jenkins on the pipeline trigger in the field
@@ -385,26 +385,25 @@ private Map<String, Object> filterRPMArtifacts(

   private static Map<String, Object> findBuildInfoInUpstreamStage(
       Stage currentStage, List<Pattern> packageFilePatterns) {

     Stage upstreamStage =
-        currentStage.ancestors().stream()
-            .filter(
-                it -> {
-                  Map<String, Object> buildInfo =
-                      (Map<String, Object>) it.getOutputs().get("buildInfo");
-                  return buildInfo != null
-                      && artifactMatch(
-                          (List<Map<String, String>>) buildInfo.get("artifacts"),
-                          packageFilePatterns);
-                })
-            .findFirst()
-            .orElse(null);
+        currentStage.findAncestor(
+            it -> {
+              Map<String, Object> buildInfo =
+                  (Map<String, Object>) it.getOutputs().get("buildInfo");
+              return buildInfo != null
+                  && artifactMatch(
+                      (List<Map<String, String>>) buildInfo.get("artifacts"), packageFilePatterns);
+            });

     return upstreamStage != null
         ? (Map<String, Object>) upstreamStage.getOutputs().get("buildInfo")
         : emptyMap();
   }

   private static boolean artifactMatch(
       List<Map<String, String>> artifacts, List<Pattern> patterns) {

     return artifacts != null
         && artifacts.stream()
             .anyMatch(
```
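
For context on what that predicate matches, here is a hedged sketch of the artifact check (illustrative only; the actual `artifactMatch` implementation is truncated above, and the `fileName` key is an assumption about the Jenkins build-info artifact maps):

```java
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Pattern;

class ArtifactMatchSketch {
  // Illustrative only: true if any artifact's fileName matches one of the requested package
  // patterns, e.g. Pattern.compile("foo.*\\.deb") matching "foo_1.2.3-h12_all.deb".
  static boolean matches(List<Map<String, String>> artifacts, List<Pattern> patterns) {
    return artifacts != null
        && artifacts.stream()
            .map(a -> a.get("fileName"))
            .filter(Objects::nonNull)
            .anyMatch(name -> patterns.stream().anyMatch(p -> p.matcher(name).matches()));
  }
}
```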