Skip to content

Commit

Permalink
Merge pull request #35 from mhahn/subcommands
Browse files Browse the repository at this point in the history
Support for subcommands within stacker
  • Loading branch information
phobologic committed Aug 6, 2015
2 parents 599d405 + c8e1eef commit 0be8272
Show file tree
Hide file tree
Showing 33 changed files with 1,708 additions and 891 deletions.
8 changes: 6 additions & 2 deletions conf/example.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,12 @@ mappings:
AmiMap:
us-east-1:
NAT: ami-ad227cc4
ubuntu1404: &ubuntu1404 ami-74e27e1c # Setting an anchor
bastion: *ubuntu1404 # Using the anchor above
ubuntu1404: ami-74e27e1c
bastion: ami-74e27e1c
us-west-2:
NAT: ami-290f4119
ubuntu1404: ami-5189a661
bastion: ami-5189a661

vpc_parameters: &vpc_parameters
VpcId: vpc::VpcId # parameters with ::'s in them refer to <stack>::<Output>
Expand Down
147 changes: 5 additions & 142 deletions scripts/stacker
Original file line number Diff line number Diff line change
@@ -1,146 +1,9 @@
#!/usr/bin/env python
""" Launches or updates cloudformation stacks based on the given config.

The script is smart enough to figure out if anything (the template, or
parameters) has changed for a given stack. If not, it will skip that stack.
Can also pull parameters from other stack's outputs.
"""

import argparse
from collections import Mapping
import copy
import logging
import yaml

from stacker.builder import Builder
from stacker.util import handle_hooks
from stacker.config import parse_config

# Root logger; setup_logging() configures it via logging.basicConfig.
logger = logging.getLogger()

# Log record layout used when -v is given (adds logger name, line, function).
DEBUG_FORMAT = ('[%(asctime)s] %(levelname)s %(name)s:%(lineno)d'
                '(%(funcName)s) - %(message)s')
# Default, terser log record layout.
INFO_FORMAT = ('[%(asctime)s] %(message)s')

# Timestamp format passed to basicConfig(datefmt=...).
ISO_8601 = '%Y-%m-%dT%H:%M:%S'


def get_stack_definitions(config, stack_list):
    """Return the stack definitions from *config*.

    When *stack_list* is empty or None the full stack config is returned
    unchanged; otherwise only definitions whose name appears in
    *stack_list* are kept.
    """
    definitions = config['stacks']
    if stack_list:
        definitions = [d for d in definitions if d['name'] in stack_list]
    return definitions


class KeyValueAction(argparse.Action):
    """argparse action that merges repeated KEY=VALUE options into one dict.

    Each occurrence of the option supplies a mapping (see key_value_arg);
    occurrences are merged, later keys overwriting earlier ones.
    """

    def __init__(self, option_strings, dest, default=None, nargs=None,
                 **kwargs):
        # Repetition is handled by merging mappings, so nargs is meaningless.
        if nargs:
            raise ValueError("nargs not allowed")
        super(KeyValueAction, self).__init__(
            option_strings, dest, nargs, default=default or {}, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        if not isinstance(values, Mapping):
            raise ValueError("type must be 'key_value'")
        current = getattr(namespace, self.dest)
        if not current:
            current = {}
            setattr(namespace, self.dest, current)
        current.update(values)


def key_value_arg(string):
    """Parse a single KEY=VALUE argument into a one-entry dict.

    Splits on the first '='; raises argparse.ArgumentTypeError when the
    separator is absent.
    """
    key, sep, value = string.partition("=")
    if not sep:
        raise argparse.ArgumentTypeError(
            "%s does not match KEY=VALUE format." % string)
    return {key: value}


def yaml_file_type(yaml_file):
    """ Reads a yaml file and returns the resulting data.

    Uses yaml.safe_load rather than yaml.load: environment files are plain
    key/value data supplied by the user, and safe_load prevents YAML tags
    from instantiating arbitrary Python objects. (yaml.load with no Loader
    is also rejected by modern PyYAML releases.)
    """
    with open(yaml_file) as fd:
        return yaml.safe_load(fd)


def parse_args():
    """Build the command-line parser and parse sys.argv.

    Returns the argparse.Namespace: `environment` and `parameters` are
    dicts, `config` is an already-open file object, and `stacks` is either
    None or a list of stack names to restrict the run to.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-r', '--region', default='us-east-1',
                        help="The AWS region to launch in. Default: "
                             "%(default)s")
    # The environment file is parsed into a dict up front by yaml_file_type.
    parser.add_argument('-e', '--environment', type=yaml_file_type,
                        default={},
                        help="Path to a yaml environment file. The values in "
                             "the environment file can be used in the stack "
                             "config as if it were a string.Template type: "
                             "https://docs.python.org/2/library/string.html"
                             "#template-strings")
    parser.add_argument('-m', '--max-zones', type=int,
                        help="Gives you the ability to limit the # of zones "
                             "that resources will be launched in. If not "
                             "given, then resources will be launched in all "
                             "available availability zones.")
    parser.add_argument('-v', '--verbose', action='count', default=0,
                        help='Increase output verbosity. May be specified up '
                             'to twice.')
    # Repeated -p options are merged into a single dict by KeyValueAction.
    parser.add_argument("-p", "--parameter", dest="parameters",
                        metavar="PARAMETER=VALUE", type=key_value_arg,
                        action=KeyValueAction, default={},
                        help="Adds parameters from the command line "
                             "that can be used inside any of the stacks "
                             "being built. Can be specified more than once.")
    parser.add_argument("--stacks", action="append",
                        metavar="STACKNAME", type=str,
                        help="Only work on the stacks given. Can be "
                             "specified more than once. If not specified "
                             "then stacker will work on all stacks in the "
                             "config file.")
    parser.add_argument('namespace',
                        help='The namespace for the stack collection. This '
                             'will be used as the prefix to the '
                             'cloudformation stacks as well as the s3 bucket '
                             'where templates are stored.')
    parser.add_argument('config', type=argparse.FileType(),
                        help="The config file where stack configuration is "
                             "located. Must be in yaml format.")
    return parser.parse_args()


def setup_logging(verbosity):
    """Configure root logging from the -v count.

    One -v switches to DEBUG with the detailed format; anything less than
    two silences boto's very chatty logger.
    """
    verbose = verbosity > 0
    log_level = logging.DEBUG if verbose else logging.INFO
    log_format = DEBUG_FORMAT if verbose else INFO_FORMAT
    if verbosity < 2:
        logging.getLogger('boto').setLevel(logging.CRITICAL)

    return logging.basicConfig(format=log_format, datefmt=ISO_8601,
                               level=log_level)
from stacker.commands import Stacker

# Script entry point.
#
# NOTE(review): this span mixes two generations of the entry point — the
# original argparse/Builder/hooks flow and the new Stacker subcommand
# dispatch at the bottom; the rendered diff shows both sides. Confirm the
# actual file contents before relying on the exact statement order.
if __name__ == '__main__':
    args = parse_args()
    setup_logging(args.verbose)
    # Work on a copy so later mutation doesn't leak back into the namespace.
    parameters = copy.deepcopy(args.parameters)
    config_string = args.config.read()

    # Merges environment values into the raw config (see --environment help).
    config = parse_config(config_string, args.environment)

    mappings = config['mappings']

    builder = Builder(args.region, args.namespace, mappings=mappings,
                      parameters=parameters)

    stack_definitions = get_stack_definitions(config, args.stacks)
    stack_names = [s['name'] for s in stack_definitions]
    # User-defined hooks run before and after the build itself.
    handle_hooks('pre_build', config.get('pre_build', None), args.region,
                 args.namespace, mappings, parameters)
    logger.info("Working on stacks: %s", ', '.join(stack_names))
    builder.build(stack_definitions)
    handle_hooks('post_build', config.get('post_build', None), args.region,
                 args.namespace, mappings, parameters)
    # Newer subcommand-based flow: Stacker owns parsing and dispatch.
    stacker = Stacker()
    args = stacker.parse_args()
    stacker.configure(args)
    args.run(args)
Empty file added stacker/actions/__init__.py
Empty file.
119 changes: 119 additions & 0 deletions stacker/actions/base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
import copy
import logging

import boto

logger = logging.getLogger(__name__)


def stack_template_key_name(blueprint):
    """Return the S3 key name for *blueprint*'s rendered template."""
    key_name = "%s-%s.json" % (blueprint.name, blueprint.version)
    return key_name


def stack_template_url(bucket_name, blueprint):
    """Return the public HTTPS S3 URL for *blueprint*'s template."""
    return "https://s3.amazonaws.com/%s/%s" % (
        bucket_name, stack_template_key_name(blueprint))


class BaseAction(object):
    """Base class for stacker actions.

    Provides the shared plumbing: a lazily created boto S3 connection, the
    per-namespace bucket used to store rendered CloudFormation templates,
    template upload, and dependency-ordered stack execution. Subclasses
    must implement `run`; `pre_run`/`post_run` are optional hooks.
    """

    def __init__(self, context, provider=None):
        # context must expose get_fqn() (project type) — used to derive the
        # per-namespace template bucket name.
        self.context = context
        self.provider = provider
        self.bucket_name = 'stacker-%s' % (context.get_fqn(),)
        # Lazily initialised by the s3_conn / cfn_bucket properties.
        # (Fixes the original, which assigned a dead `_conn` attribute while
        # s3_conn probed for `_s3_conn` via hasattr.)
        self._s3_conn = None
        self._cfn_bucket = None

    @property
    def s3_conn(self):
        """Lazily created boto S3 connection, cached on the instance."""
        if self._s3_conn is None:
            self._s3_conn = boto.connect_s3()
        return self._s3_conn

    @property
    def cfn_bucket(self):
        """The S3 bucket holding rendered templates, created on demand.

        Raises the underlying S3ResponseError for access-denied or any
        error other than a missing bucket.
        """
        if self._cfn_bucket is None:
            try:
                self._cfn_bucket = self.s3_conn.get_bucket(self.bucket_name)
            except boto.exception.S3ResponseError as e:
                if e.error_code == 'NoSuchBucket':
                    logger.debug("Creating bucket %s.", self.bucket_name)
                    self._cfn_bucket = self.s3_conn.create_bucket(
                        self.bucket_name)
                elif e.error_code == 'AccessDenied':
                    logger.exception("Access denied for bucket %s.",
                                     self.bucket_name)
                    raise
                else:
                    logger.exception("Error creating bucket %s.",
                                     self.bucket_name)
                    raise
        return self._cfn_bucket

    def stack_template_url(self, blueprint):
        """Return the S3 URL for *blueprint*'s rendered template."""
        return stack_template_url(self.bucket_name, blueprint)

    def s3_stack_push(self, blueprint, force=False):
        """ Pushes the rendered blueprint's template to S3.

        Verifies that the template doesn't already exist in S3 before
        pushing, unless *force* is true.

        Returns the URL to the template in S3.
        """
        key_name = stack_template_key_name(blueprint)
        template_url = self.stack_template_url(blueprint)
        if self.cfn_bucket.get_key(key_name) and not force:
            logger.debug("Cloudformation template %s already exists.",
                         template_url)
            return template_url
        key = self.cfn_bucket.new_key(key_name)
        key.set_contents_from_string(blueprint.rendered)
        logger.debug("Blueprint %s pushed to %s.", blueprint.name,
                     template_url)
        return template_url

    def execute(self, *args, **kwargs):
        """Run the action: pre_run, then run, then post_run."""
        self.pre_run(*args, **kwargs)
        self.run(*args, **kwargs)
        self.post_run(*args, **kwargs)

    def pre_run(self, *args, **kwargs):
        """Hook executed before `run`; subclasses may override."""
        pass

    def run(self, *args, **kwargs):
        """The action's main body; subclasses must implement it."""
        raise NotImplementedError('Subclass must implement "run" method')

    def post_run(self, *args, **kwargs):
        """Hook executed after `run`; subclasses may override."""
        pass

    def _get_all_stack_names(self, dependency_dict):
        """Return every stack name appearing as a key or as a dependency.

        (The original concatenated dict.keys() with a list, which only
        works on Python 2.)
        """
        names = set(dependency_dict)
        for dependencies in dependency_dict.values():
            names.update(dependencies)
        return names

    def get_stack_execution_order(self, dependency_dict):
        """Return stack names ordered so each appears after its dependencies.

        *dependency_dict* maps stack name -> iterable of stack names it
        depends on; names that only appear as dependencies are included.
        Implements Kahn's algorithm; the input mapping is not mutated.

        Raises ValueError if the graph contains a cycle (the original
        implementation looped forever in that case, and also mutated the
        pending list while iterating it).
        """
        # name -> set of still-unsatisfied requirements, covering every
        # stack mentioned anywhere in the graph.
        remaining = dict(
            (name, set(dependency_dict.get(name, [])))
            for name in self._get_all_stack_names(dependency_dict)
        )
        order = []
        while remaining:
            # Stacks whose requirements are all satisfied can run now.
            ready = [name for name, reqs in remaining.items() if not reqs]
            if not ready:
                raise ValueError(
                    "Circular dependency detected among stacks: %s" %
                    ', '.join(sorted(remaining)))
            for name in ready:
                del remaining[name]
                order.append(name)
            # Satisfy this batch for everything still waiting.
            for reqs in remaining.values():
                reqs.difference_update(ready)
        return order

0 comments on commit 0be8272

Please sign in to comment.