feat(core): add save pipelines stage (#2715)
* feat(core): add save pipelines stage

This stage will be used to extract pipelines from an artifact and save them. Spinnaker is a tool for deploying code, so when we treat pipelines as code it makes sense to use Spinnaker to deploy them.
Imagine you have your pipelines in a GitHub repo, and on each build you create an artifact that describes your pipelines. This CircleCI build is an example:
https://circleci.com/gh/claymccoy/canal_example/14#artifacts/containers/0

You can now create a pipeline that is triggered by that build, grab the artifact produced, and (with this new stage) extract the pipelines and save them.
The stage performs an upsert: it looks for an existing pipeline with the same application and name and, if one is found, reuses its id so that pipeline is updated instead of a new one being created.
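
A minimal sketch of that matching rule (the helper class and its names are illustrative, not part of this change):

import java.util.List;
import java.util.Map;
import java.util.Optional;

class UpsertMatchingSketch {
  // If an existing pipeline in the same application has the same name, copy its id onto the
  // incoming pipeline so the subsequent save updates it; without an id, a new pipeline is created.
  static Map<String, Object> withExistingId(Map<String, Object> pipeline,
                                            List<Map<String, Object>> existingAppPipelines) {
    Optional<Map<String, Object>> match = existingAppPipelines.stream()
      .filter(existing -> existing.get("name").equals(pipeline.get("name")))
      .findFirst();
    match.ifPresent(existing -> pipeline.put("id", existing.get("id")));
    return pipeline;
  }
}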

The artifact is a JSON object whose top-level keys are application names and whose values are lists of pipelines for that application. The nested pipeline JSON is standard pipeline JSON. Here is an example:
https://14-171799544-gh.circle-artifacts.com/0/pipelines/pipelines.json
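
For reference, a self-contained sketch of that shape (the application and pipeline names below are made up), parsed into the same Map<String, List<Map>> structure the new GetPipelinesFromArtifactTask uses:

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.List;
import java.util.Map;

class PipelinesArtifactFormatSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical artifact content: top-level keys are application names,
    // each value is a list of standard pipeline JSON documents.
    String artifactJson = "{"
      + "\"exampleapp\": ["
      + "  {\"name\": \"Deploy to test\", \"stages\": [], \"triggers\": []},"
      + "  {\"name\": \"Deploy to prod\", \"stages\": [], \"triggers\": []}"
      + "]"
      + "}";

    Map<String, List<Map>> pipelinesByApp = new ObjectMapper()
      .readValue(artifactJson, new TypeReference<Map<String, List<Map>>>() {});
    System.out.println(pipelinesByApp.keySet()); // prints [exampleapp]
  }
}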

For now this simply upserts every pipeline in the artifact, but in the future it could allow you to specify a subset of applications and pipelines, making it possible to test a rollout and then gradually increase its scope. It (or a similar stage) could also save pipeline templates.

* Use constructors and private fields rather than autowired fields

* Summarize results of pipeline saves

* Summarize save pipelines results as created, updated, or failed
claymccoy committed Mar 13, 2019
1 parent b549b58 commit 9ae64a9
Showing 13 changed files with 896 additions and 0 deletions.
SavePipelinesFromArtifactStage.java
@@ -0,0 +1,45 @@
/*
* Copyright 2019 Pivotal, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.orca.clouddriver.pipeline.pipeline;

import com.netflix.spinnaker.orca.clouddriver.tasks.pipeline.*;
import com.netflix.spinnaker.orca.front50.tasks.MonitorFront50Task;
import com.netflix.spinnaker.orca.front50.tasks.SavePipelineTask;
import com.netflix.spinnaker.orca.pipeline.StageDefinitionBuilder;
import com.netflix.spinnaker.orca.pipeline.TaskNode.Builder;
import com.netflix.spinnaker.orca.pipeline.model.Stage;
import org.springframework.stereotype.Component;

@Component
public class SavePipelinesFromArtifactStage implements StageDefinitionBuilder {

@Override
public void taskGraph(Stage stage, Builder builder) {

builder
.withTask("getPipelinesFromArtifact", GetPipelinesFromArtifactTask.class)
.withLoop(subGraph -> {
subGraph
.withTask("preparePipelineToSaveTask", PreparePipelineToSaveTask.class)
.withTask("savePipeline", SavePipelineTask.class)
.withTask("waitForPipelineSave", MonitorFront50Task.class)
.withTask("checkPipelineResults", CheckPipelineResultsTask.class)
.withTask("checkForRemainingPipelines", CheckForRemainingPipelinesTask.class);
})
.withTask("savePipelinesCompleteTask", SavePipelinesCompleteTask.class);
}

}
CheckForRemainingPipelinesTask.java
@@ -0,0 +1,36 @@
/*
* Copyright 2019 Pivotal, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.orca.clouddriver.tasks.pipeline;

import com.netflix.spinnaker.orca.ExecutionStatus;
import com.netflix.spinnaker.orca.Task;
import com.netflix.spinnaker.orca.TaskResult;
import com.netflix.spinnaker.orca.pipeline.model.Stage;
import org.springframework.stereotype.Component;

@Component
public class CheckForRemainingPipelinesTask implements Task {

@Override
public TaskResult execute(Stage stage) {
final SavePipelinesData savePipelines = stage.mapTo(SavePipelinesData.class);
if (savePipelines.getPipelinesToSave() == null || savePipelines.getPipelinesToSave().isEmpty()) {
return new TaskResult(ExecutionStatus.SUCCEEDED);
}
// More pipelines remain to be saved; REDIRECT sends execution back through the stage's save loop.
return new TaskResult(ExecutionStatus.REDIRECT);
}

}
CheckPipelineResultsTask.java
@@ -0,0 +1,77 @@
/*
* Copyright 2019 Pivotal, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.orca.clouddriver.tasks.pipeline;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spinnaker.orca.ExecutionStatus;
import com.netflix.spinnaker.orca.Task;
import com.netflix.spinnaker.orca.TaskResult;
import com.netflix.spinnaker.orca.pipeline.model.Stage;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;

@Component
public class CheckPipelineResultsTask implements Task {

private final ObjectMapper objectMapper;

public CheckPipelineResultsTask(ObjectMapper objectMapper) {
this.objectMapper = objectMapper;
}

@Override
public TaskResult execute(Stage stage) {
final SavePipelineResultsData previousSavePipelineResults = stage.mapTo(SavePipelineResultsData.class);
final SavePipelinesData savePipelinesData = stage.mapTo(SavePipelinesData.class);
final List<PipelineReferenceData> previousCreated = previousSavePipelineResults.getPipelinesCreated();
final List<PipelineReferenceData> previousUpdated = previousSavePipelineResults.getPipelinesUpdated();
final List<PipelineReferenceData> previousFailedToSave = previousSavePipelineResults.getPipelinesFailedToSave();
final SavePipelineResultsData savePipelineResults = new SavePipelineResultsData(
previousCreated == null ? new ArrayList<>() : previousCreated,
previousUpdated == null ? new ArrayList<>() : previousUpdated,
previousFailedToSave == null ? new ArrayList<>() : previousFailedToSave
);

stage.getTasks().stream().filter( task -> task.getName().equals("savePipeline")).findFirst()
.ifPresent(savePipelineTask -> {
final String application = (String) stage.getContext().get("application");
final String pipelineName = (String) stage.getContext().get("pipeline.name");
final String pipelineId = (String) stage.getContext().get("pipeline.id");
final PipelineReferenceData ref = new PipelineReferenceData(application, pipelineName, pipelineId);
if (savePipelineTask.getStatus().isSuccessful()) {
final Boolean isExistingPipeline = (Boolean) Optional.ofNullable(stage.getContext().get("isExistingPipeline"))
.orElse(false);
if (isExistingPipeline) {
savePipelineResults.getPipelinesUpdated().add(ref);
} else {
savePipelineResults.getPipelinesCreated().add(ref);
}
} else {
savePipelineResults.getPipelinesFailedToSave().add(ref);
}
});

final Map<String, ?> output =
  objectMapper.convertValue(savePipelineResults, new TypeReference<Map<String, Object>>() {});
return new TaskResult(ExecutionStatus.SUCCEEDED, output);
}

}
GetPipelinesFromArtifactTask.java
@@ -0,0 +1,133 @@
/*
* Copyright 2019 Pivotal, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.orca.clouddriver.tasks.pipeline;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.io.CharStreams;
import com.netflix.spinnaker.kork.artifacts.model.Artifact;
import com.netflix.spinnaker.kork.core.RetrySupport;
import com.netflix.spinnaker.orca.ExecutionStatus;
import com.netflix.spinnaker.orca.Task;
import com.netflix.spinnaker.orca.TaskResult;
import com.netflix.spinnaker.orca.clouddriver.OortService;
import com.netflix.spinnaker.orca.front50.Front50Service;
import com.netflix.spinnaker.orca.pipeline.model.Stage;
import com.netflix.spinnaker.orca.pipeline.util.ArtifactResolver;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import retrofit.client.Response;

import java.io.*;
import java.util.*;
import java.util.stream.Collectors;

@Component
public class GetPipelinesFromArtifactTask implements Task {

private Logger log = LoggerFactory.getLogger(getClass());
RetrySupport retrySupport = new RetrySupport();

private final Front50Service front50Service;
private final OortService oort;
private final ObjectMapper objectMapper;
private final ArtifactResolver artifactResolver;

public GetPipelinesFromArtifactTask(Front50Service front50Service,
OortService oort,
ObjectMapper objectMapper,
ArtifactResolver artifactResolver) {
this.front50Service = front50Service;
this.oort = oort;
this.objectMapper = objectMapper;
this.artifactResolver = artifactResolver;
}

@Getter
@NoArgsConstructor
@AllArgsConstructor
public static class PipelinesArtifactData {
@JsonProperty("pipelinesArtifactId") private String id;
@JsonProperty("pipelinesArtifact") private Artifact inline;
}

@SuppressWarnings("unchecked")
@Override
public TaskResult execute(Stage stage) {
final PipelinesArtifactData pipelinesArtifact = stage.mapTo(PipelinesArtifactData.class);
Artifact resolvedArtifact = artifactResolver
.getBoundArtifactForStage(stage, pipelinesArtifact.getId(), pipelinesArtifact.getInline());
if (resolvedArtifact == null) {
throw new IllegalArgumentException("No artifact could be bound to '" + pipelinesArtifact.getId() + "'");
}
log.info("Using {} as the pipelines to be saved", pipelinesArtifact);

String pipelinesText = getPipelinesArtifactContent(resolvedArtifact);

Map<String, List<Map>> pipelinesFromArtifact = null;
try {
pipelinesFromArtifact = objectMapper.readValue(pipelinesText, new TypeReference<Map<String, List<Map>>>() {});
} catch (IOException e) {
log.warn("Failure parsing pipelines from {}", pipelinesArtifact, e);
throw new IllegalStateException(e); // forces a retry
}
final Map<String, List<Map>> finalPipelinesFromArtifact = pipelinesFromArtifact;
final Set<String> appNames = pipelinesFromArtifact.keySet();
final List newAndUpdatedPipelines = appNames.stream().flatMap(appName -> {
final List<Map<String, Object>> existingAppPipelines = front50Service.getPipelines(appName);
final List<Map> specifiedAppPipelines = finalPipelinesFromArtifact.get(appName);
return specifiedAppPipelines.stream().map(p -> {
final Map<String, Object> pipeline = p;
pipeline.put("application", appName);
final Optional<Map<String, Object>> matchedExistingPipeline = existingAppPipelines
.stream().filter(existingPipeline -> existingPipeline.get("name").equals(pipeline.get("name"))).findFirst();
matchedExistingPipeline.ifPresent(matchedPipeline -> {
pipeline.put("id", matchedPipeline.get("id"));
});
return pipeline;
}).filter(pipeline -> !pipeline.isEmpty());
}).collect(Collectors.toList());
final SavePipelinesData output = new SavePipelinesData(null, newAndUpdatedPipelines);
return new TaskResult(ExecutionStatus.SUCCEEDED,
objectMapper.convertValue(output, new TypeReference<Map<String, Object>>() {}));
}

private String getPipelinesArtifactContent(Artifact artifact) {
return retrySupport.retry(() -> {
Response response = oort.fetchArtifact(artifact);
InputStream artifactInputStream;
try {
artifactInputStream = response.getBody().in();
} catch (IOException e) {
log.warn("Failure fetching pipelines from {}", artifact, e);
throw new IllegalStateException(e); // forces a retry
}
try (InputStreamReader rd = new InputStreamReader(artifactInputStream)) {
return CharStreams.toString(rd);
} catch (IOException e) {
log.warn("Failure reading pipelines from {}", artifact, e);
throw new IllegalStateException(e); // forces a retry
}
}, 10, 200, true);
}

}

PipelineReferenceData.java
@@ -0,0 +1,29 @@
/*
* Copyright 2019 Pivotal, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.orca.clouddriver.tasks.pipeline;

import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;

@Getter
@NoArgsConstructor
@AllArgsConstructor
public class PipelineReferenceData {
private String application;
private String name;
private String id;
}
PreparePipelineToSaveTask.java
@@ -0,0 +1,66 @@
/*
* Copyright 2019 Pivotal, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.orca.clouddriver.tasks.pipeline;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spinnaker.orca.ExecutionStatus;
import com.netflix.spinnaker.orca.Task;
import com.netflix.spinnaker.orca.TaskResult;
import com.netflix.spinnaker.orca.pipeline.model.Stage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import java.util.Base64;
import java.util.List;
import java.util.Map;

@Component
public class PreparePipelineToSaveTask implements Task {

private Logger log = LoggerFactory.getLogger(getClass());

private final ObjectMapper objectMapper;

public PreparePipelineToSaveTask(ObjectMapper objectMapper) {
this.objectMapper = objectMapper;
}

@Override
public TaskResult execute(Stage stage) {
final SavePipelinesData input = stage.mapTo(SavePipelinesData.class);
if (input.getPipelinesToSave() == null || input.getPipelinesToSave().isEmpty()) {
log.info("There are no pipelines to save.");
return new TaskResult(ExecutionStatus.TERMINAL);
}
final Map pipelineData = input.getPipelinesToSave().get(0);
final String pipelineString;
try {
pipelineString = objectMapper.writeValueAsString(pipelineData);
} catch (JsonProcessingException e) {
throw new IllegalStateException(e);
}
final String encodedPipeline = Base64.getEncoder().encodeToString(pipelineString.getBytes());
final List<Map> remainingPipelinesToSave = input.getPipelinesToSave().subList(1, input.getPipelinesToSave().size());
final SavePipelinesData outputSavePipelinesData = new SavePipelinesData(encodedPipeline, remainingPipelinesToSave);
final Map output = objectMapper.convertValue(outputSavePipelinesData, new TypeReference<Map<String, Object>>() {});
// Read later by CheckPipelineResultsTask to classify this save as a create or an update.
output.put("isExistingPipeline", pipelineData.get("id") != null);
return new TaskResult(ExecutionStatus.SUCCEEDED, output);
}

}
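
PreparePipelineToSaveTask above Base64-encodes one pipeline per loop iteration for the downstream SavePipelineTask, which is not part of this diff. A rough, illustrative sketch of decoding such a value back into a pipeline map (class and method names are hypothetical):

import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;
import java.util.Base64;
import java.util.Map;

class EncodedPipelineDecodeSketch {
  // Reverses the encoding above: Base64 string -> JSON bytes -> pipeline map.
  @SuppressWarnings("unchecked")
  static Map<String, Object> decode(String encodedPipeline, ObjectMapper objectMapper) throws IOException {
    byte[] pipelineJson = Base64.getDecoder().decode(encodedPipeline);
    return objectMapper.readValue(pipelineJson, Map.class);
  }
}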
