Merge pull request #4 from brettswift/feature/fix_acceptance_tests
Acceptance / integration test of the pipeline works
brettswift committed Aug 28, 2018
2 parents 2bd9e01 + 64d5038 commit 6a95251
Showing 8 changed files with 180 additions and 20 deletions.
2 changes: 1 addition & 1 deletion cumulus/policies/codebuild.py
@@ -3,7 +3,7 @@
import awacs.logs
import awacs.iam
import awacs.s3
from awacs import ecr
import awacs.ecr

from troposphere import iam

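This swaps "from awacs import ecr" for "import awacs.ecr", matching the other awacs.* imports above. A minimal sketch of how the module-style reference is then used in a policy statement (the statement itself is illustrative and not part of this commit):

import awacs.aws
import awacs.ecr

# Illustrative only: the action and resource here are assumptions, shown to
# demonstrate the awacs.ecr.<ActionName> reference style the import enables.
ecr_statement = awacs.aws.Statement(
    Effect=awacs.aws.Allow,
    Action=[awacs.ecr.GetAuthorizationToken],
    Resource=["*"],
)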
9 changes: 7 additions & 2 deletions cumulus/steps/development/pipeline.py
@@ -176,8 +176,13 @@ def handle(self, chain_context):
            )
        )

        pipeline_output = troposphere.Output(
            "PipelineLogicalName",
            Description="Pipeline (logical id)",
            Value=Ref(generic_pipeline),
        )

        chain_context.template.add_resource(pipeline_bucket)
        chain_context.template.add_resource(pipeline_service_role)
        chain_context.template.add_resource(generic_pipeline)

        # chain_context.metadata[cumulus.steps.development.META_LAST_STAGE_OUTPUT] = SOURCE_STAGE_OUTPUT_NAME
        chain_context.template.add_output(pipeline_output)
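The new PipelineLogicalName output exposes Ref(generic_pipeline), which the acceptance script later feeds to aws codepipeline get-pipeline-state. A hedged sketch of reading that output straight from CloudFormation instead; the stack name below is an assumption, the test itself resolves it with stacker info:

import boto3

STACK_NAME = "cumulus-acc-pipelinesimple"  # hypothetical stack name

cloudformation = boto3.client("cloudformation")
stack = cloudformation.describe_stacks(StackName=STACK_NAME)["Stacks"][0]
pipeline_name = next(
    output["OutputValue"]
    for output in stack["Outputs"]
    if output["OutputKey"] == "PipelineLogicalName"
)
print(pipeline_name)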
3 changes: 2 additions & 1 deletion setup.py
@@ -26,7 +26,8 @@
    'flake8',
    'pytest-watch',
    'pytest-cov',
    'coveralls'
    'coveralls',
    'awscli'
]

extras = {
108 changes: 96 additions & 12 deletions tests/stacker_test/blueprints/pipeline_simple.py
@@ -1,35 +1,119 @@
from cumulus.chain import chain, chaincontext
from cumulus.steps import development
import troposphere
from stacker.blueprints.base import Blueprint
import troposphere.codebuild

from cumulus.chain import chain, chaincontext
from cumulus.steps.development import pipeline, code_build_action, pipeline_stage, pipeline_source_action
from cumulus.steps.development.approval_action import ApprovalAction


class PipelineSimple(Blueprint):
    """
    An example development that doesn't do anything interesting.
    """

    VARIABLES = {
        # 'git-commit': {'type': basestring, 'description': 'git version'},
    }

    def create_template(self):

        t = self.template
        t.add_description("development spike for dtf")

        instance = self.name + self.context.environment['env']

        # TODO: give to builder
        the_chain = chain.Chain()
        the_chain.add(development.Pipeline(name="uptime-dev"))
        # bucket becomes: <namespace>-<account id>-automatedtests, e.g. cumulus-acc-123123-automatedtests
        pipeline_bucket_name = troposphere.Join('', [
            self.context.namespace,
            "-",
            troposphere.Ref("AWS::AccountId"),
            "-",
            "automatedtests"
        ])

        the_chain.add(pipeline.Pipeline(
            name=self.name,
            bucket_name=pipeline_bucket_name,
        ))

        source_stage_name = "SourceStage"
        deploy_stage_name = "DeployStage"
        service_artifact = "ServiceArtifact"

        the_chain.add(
            pipeline_stage.PipelineStage(stage_name=source_stage_name)
        )

        the_chain.add(
            pipeline_source_action.PipelineSourceAction(
                action_name="MicroserviceSource",
                output_artifact_name=service_artifact,
                s3_bucket_name=pipeline_bucket_name,
                s3_object_key="artifact.tar.gz"
            )
        )

        the_chain.add(
            pipeline_stage.PipelineStage(
                stage_name=deploy_stage_name,
            ),
        )

        the_chain.add(code_build_action.CodeBuildAction(
            action_name="DeployMyStuff",
            stage_name_to_add=deploy_stage_name,
            input_artifact_name=service_artifact,
        ))

        test_env = troposphere.codebuild.Environment(
            ComputeType='BUILD_GENERAL1_SMALL',
            Image='aws/codebuild/golang:1.10',
            Type='LINUX_CONTAINER',
            EnvironmentVariables=[
                {'Name': 'URL', 'Value': "https://google.ca"}
            ],
        )

        inline_echo_url_spec = """version: 0.2
phases:
  build:
    commands:
      - echo $URL
"""

        the_chain.add(code_build_action.CodeBuildAction(
            action_name="NotificationSmokeTest",
            stage_name_to_add=deploy_stage_name,
            input_artifact_name=service_artifact,
            environment=test_env,
            buildspec='buildspec_smoke_test.yml',
        ))

        destroy_stage_name = "EchoAURL"
        the_chain.add(
            pipeline_stage.PipelineStage(
                stage_name=destroy_stage_name,
            ),
        )

        # Example usage if you have a VPC
        # vpc_config = development.VpcConfig(
        #     vpc_id='',
        #     subnets=[
        #         'subnet-1',
        #     ]
        # )
        the_chain.add(ApprovalAction(
            action_name="ApproveDestruction",
            stage_name_to_add=destroy_stage_name
        ))

        the_chain.add(development.CodeBuildStage())  # This should hopefully be more valuable, context maybe!
        the_chain.add(code_build_action.CodeBuildAction(
            action_name="DestroyRocketChat",
            stage_name_to_add=destroy_stage_name,
            input_artifact_name=service_artifact,
            buildspec=inline_echo_url_spec,
        ))

        chain_context = chaincontext.ChainContext(
            template=t,
            instance_name=self.name
            instance_name=instance
        )

        the_chain.run(chain_context)
@@ -1,3 +1,4 @@
# Used for your dev environment:
# http://stacker.readthedocs.io/en/latest/environments.html
namespace: int-test-dev
namespace: acc
env: ac
11 changes: 11 additions & 0 deletions tests/stacker_test/delete_bucket_versions.py
@@ -0,0 +1,11 @@
import boto3
import sys

bucket_name = str(sys.argv[1])

print("deleting bucket %s " % bucket_name)

session = boto3.Session()
s3 = session.resource(service_name='s3')
bucket = s3.Bucket(bucket_name)
bucket.object_versions.delete()
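A versioned bucket cannot be removed while any object versions or delete markers remain; bucket.object_versions.delete() clears both. The integration script below invokes this helper as "python delete_bucket_versions.py ${BUCKET}" right after "aws s3 rm s3://${BUCKET} --recursive", so the test bucket can be torn down when the stack is destroyed.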
62 changes: 60 additions & 2 deletions tests/stacker_test/run-integration.sh
@@ -1,4 +1,62 @@
#!/usr/bin/env bash

stacker build conf/test.env stacker.yaml
stacker destroy conf/test.env stacker.yaml --force
ACCOUNT_ID=`aws sts get-caller-identity | jq .Account | tr -d '"' `
NAMESPACE=acc # must match the namespace in the conf file
BUCKET="cumulus-${NAMESPACE}-${ACCOUNT_ID}-automatedtests"

echo "Using account: ${ACCOUNT_ID}"
echo "Using bucket: ${BUCKET}"

set -e #Important. Script will exit appropriately if there is an error.

stacker build conf/acceptance.env stacker.yaml --recreate-failed -t

ARTIFACT_NAME='artifact.tar.gz'
TEMP_DIR='ac_build'

pushd ../../ # move to main folder
mkdir -p ${TEMP_DIR}
zip -r ${TEMP_DIR}/${ARTIFACT_NAME} ./ -x *.git* *./${TEMP_DIR}* *.eggs* *.idea* *.tox*

aws s3 cp ./${TEMP_DIR}/${ARTIFACT_NAME} s3://${BUCKET}/${ARTIFACT_NAME}

rm -rf ${TEMP_DIR}
popd # return to test folder

# TODO: wait for pipeline
PIPELINE_NAME=$(stacker info conf/acceptance.env stacker.yaml 2>&1 | grep PipelineLogicalName | cut -f 3 -d " ")

echo "Waiting for pipeline: ${PIPELINE_NAME}"

# Compute a shasum of the expected stage states. "null" for the EchoAURL stage
# means that stage has not reported an execution status yet.
expected_pipeline_state=$(echo -e "SourceStage Succeeded\nDeployStage Succeeded\nEchoAURL null" | shasum)

set +e # don't exit with a failure, let the loop continue
end=$((SECONDS+180))
pipeline_result=0
while [ $SECONDS -lt ${end} ]; do
    sleep 15
    # Get status from each stage in the pipeline; when the shasums of the
    # expected and actual output match, we have reached the approval state.
    pipeline_state=$(aws codepipeline get-pipeline-state --name ${PIPELINE_NAME} | jq -r '.stageStates[] | "\(.stageName) \(.latestExecution.status)"')
    actual_pipeline_state=$(echo ${pipeline_state} | shasum)
    if [[ ${expected_pipeline_state} == ${actual_pipeline_state} ]] ; then
        echo "Pipeline Succeeded to approval step!"
        break;
    else
        if [[ ${pipeline_state} = *"Failed"* ]]; then
            echo "Pipeline Failed."
            pipeline_result=1
            break;
        fi
    fi
done

aws s3 rm s3://${BUCKET} --recursive
python delete_bucket_versions.py ${BUCKET}

stacker destroy conf/acceptance.env stacker.yaml --force -t

echo "Completed As Expected!"

exit ${pipeline_result}
2 changes: 1 addition & 1 deletion tests/stacker_test/stacker.yaml
@@ -1,6 +1,6 @@
# This file is used in an integration test for this project.

namespace: ${namespace}
namespace: cumulus-${namespace}

stacker_bucket: bswift-spike

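Prefixing the namespace with "cumulus-" keeps the generated bucket name aligned with the BUCKET="cumulus-${NAMESPACE}-${ACCOUNT_ID}-automatedtests" value computed in run-integration.sh, since the blueprint joins the namespace, the account id, and the "automatedtests" suffix to name the pipeline bucket.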
