Permalink
Browse files

initial version of launchbox

  • Loading branch information...
0 parents commit 24d85fc95340f884db3d37f009621f0de7c56116 @cosmin committed Jul 3, 2012
@@ -0,0 +1,8 @@
+*.pyc
+*.egg-info
+
+/env
+/build
+/target
+/cache
+/dist
13 LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2012 Simple Finance Technology Corp.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
@@ -0,0 +1,64 @@
+# launchbox
+
+Bundle cookbooks for use by chef-solo.
+
Launchbox supports transitive dependency resolution, chef version
constraints and exclusions (for those misbehaved cookbooks that depend
on more than they should).
+
+## Input
+
launchbox will look for YAML files under the `roles` and `mixins`
directories in the input directory (which defaults to `./src`).
+
+An example role will look like:
+
+```
+ci:
+ include_mixins: [java]
+ jenkins:
+ http_proxy:
+ variant: nginx
+ cookbooks:
+ java: "1.5.1"
+ nginx: "0.101.1"
+ jenkins: "0.6.3"
+ run_list:
+ - java
+ - jenkins
+```
+
+Think of mixins as partial roles for reusability. At run-time
+launchbox will apply all mixins in order and then override any values
+with the data in the role itself.
+
+## Cookbooks
+
+The cookbooks specified for a role will be downloaded from the
+specified web server or S3 bucket.
+
+### S3
+
+To use an S3 remote use `launchbox --bucket your.bucket.name`
+
+In the case of an S3 remote `launchbox` will look for keys of the form `cookbooks/<cookbook>/<version>/<cookbook>.tar.gz` for the cookbook contents, and `cookbooks/<cookbook>/<version>/<cookbook>.json` for the metadata. For determining the available versions `launchbox` will list the bucket and find all available versions.
+
+### HTTP
+
To use an HTTP(S) remote use `launchbox --url http://your/bucket/server`
+
In the case of an HTTP(s) remote `launchbox` will download cookbook data from `http://example.com/cookbooks/<cookbook>/<version>/<cookbook>.tar.gz`, cookbook metadata from `http://example.com/cookbooks/<cookbook>/<version>/<cookbook>.json` and the list of available cookbook versions from `http://example.com/cookbooks/<cookbook>/versions.json`
+
+## Output
+
+After running launchbox the target folder will contain a JSON file
+with the role metadata and tar.gz containing all the necessary
+cookbooks for that role.
+
+## Usage
+
For detailed usage information, run
+
+```
+launchbox -h
+```
@@ -0,0 +1,32 @@
#!/usr/bin/env python

"""launchbox

Usage:
  launchbox [options] clean
  launchbox --bucket=BUCKET [(--access-key=ACCESS_KEY --secret-key=SECRET_KEY)] [options] (deps | package) [<role>]...
  launchbox --url=URL [options] (deps | package) [<role>]...
  launchbox -h | --help
  launchbox -v | --version

Options:
  -h --help                   Show this screen.
  -v --version                Show the version.

  -C --cache-dir=CACHE_DIR    Directory in which to cache downloaded assets [default: ./cache].
  --target=TARGET_DIR         Directory in which to store the packaged assets [default: ./target].
  --source=SOURCE_DIR         Source directory holding roles and mixins [default: ./src].

  -b --bucket=BUCKET          Use an S3 bucket endpoint.
  -a --access-key=ACCESS_KEY  AWS Access Key Id for S3 access. [default: $AWS_ACCESS_KEY_ID].
  -s --secret-key=SECRET_KEY  AWS Secret Access Key for S3 access. [default: $AWS_SECRET_ACCESS_KEY].

  -u --url=URL                URL for a generic HTTP endpoint.
"""

from docopt import docopt

from launchbox import __version__, app

# Guard the entry point so importing this module (by tools, tests, or
# setuptools console-script shims) does not immediately parse argv and
# run the application. docopt still reads the module docstring above.
if __name__ == '__main__':
    params = docopt(__doc__, version='Launchbox %s' % __version__)
    app.run(params)
@@ -0,0 +1 @@
+__version__ = '0.1'
@@ -0,0 +1,117 @@
from functools import partial
import hashlib
import json
import logging
import os
from os.path import join
import shutil
import sys

from .output import log
from .osutils import mkdirp
from .errors import *
from .cookbooks import *
from .roles import *
from .client import CookbookClient, S3CookbookRemote, HttpCookbookRemote
+
def thrush(value, *fns):
    """Thread *value* through each function in *fns*, left to right.

    thrush(x, f, g) is g(f(x)); with no functions it returns *value*
    unchanged.
    """
    for transform in fns:
        value = transform(value)
    return value
+
def sign(fname):
    """Write a detached SHA-256 digest of *fname* to "<fname>.sha256".

    The file is read in 4 KiB chunks so arbitrarily large tarballs can
    be hashed without loading them into memory.
    """
    digest = hashlib.sha256()
    # 'rb' is required: the inputs are .tar.gz/.json artifacts and the
    # hash must see the exact bytes (text mode would translate newlines
    # on Windows and hand unicode to hashlib on Python 3).
    with open(fname, 'rb') as f:
        for chunk in iter(partial(f.read, 4096), b''):
            digest.update(chunk)
    with open(fname + '.sha256', 'w') as f:
        f.write(digest.hexdigest())
+
def launchbox(params):
    """Run the clean / deps / package workflow selected by *params*.

    *params* is the docopt dictionary, e.g.:

    {'--access-key': None,
     '--bucket': 'your.bucket.name',
     '--cache-dir': './cache',
     '--help': False,
     '--secret-key': None,
     '--source': './src',
     '--target': './target',
     '--url': None,
     '--version': False,
     '<role>': ['bar'],
     'clean': False,
     'deps': True,
     'package': False
    }

    Returns a process exit code (0 on success).
    """
    cache_dir = params['--cache-dir']
    input_dir = params['--source']
    output_dir = params['--target']

    if params['clean']:
        # ignore_errors so "clean" also succeeds when the directories
        # were never created (or were already cleaned).
        shutil.rmtree(cache_dir, ignore_errors=True)
        shutil.rmtree(output_dir, ignore_errors=True)
        return 0

    log.level = logging.DEBUG
    mkdirp(cache_dir)
    mkdirp(output_dir)

    # Prefer the S3 remote when a bucket was given; otherwise fall back
    # to the generic HTTP(S) remote (docopt guarantees one of the two).
    if params['--bucket']:
        delegate = S3CookbookRemote(params['--access-key'],
                                    params['--secret-key'],
                                    params['--bucket'])
    else:
        delegate = HttpCookbookRemote(params['--url'])

    client = CookbookClient(cache_dir, delegate)
    download = partial(download_cookbooks, client)

    role_paths, mixin_paths = find_roles_and_mixins(input_dir)

    # Apply mixins first, layer role data on top, then normalize the
    # cookbook constraints and run_list entries.
    all_roles = thrush(mixin(load_data_from_files(role_paths),
                             load_data_from_files(mixin_paths)),
                       transform_cookbooks,
                       transform_runlist)

    if params['<role>']:
        # Restrict to the explicitly requested roles; a KeyError here
        # means an unknown role name was given on the command line.
        roles = dict((name, all_roles[name]) for name in params['<role>'])
    else:
        roles = all_roles

    if params['deps']:
        # Print the resolved dependency tree per role as JSON.
        data = {}
        for name, role in roles.items():
            _, dep_tree = resolve_dependencies(client, role['cookbooks'],
                                               allow_not_found=True)
            data[name] = dep_tree
        print(json.dumps(data, indent=4))
        return 0
    elif params['package']:
        for name, role in roles.items():
            cookbooks = role.pop('cookbooks')
            json_fname = join(output_dir, "%s.json" % name)
            tarball_fname = join(output_dir, "%s.tar.gz" % name)
            dump_to_json(role, json_fname)
            package(extract(download(cookbooks)), tarball_fname)
            # Ship detached SHA-256 digests alongside each artifact.
            sign(json_fname)
            sign(tarball_fname)
        return 0
+
+
def run(params):
    """Resolve env-var credential defaults, then run launchbox.

    Exits the process with launchbox's return value on success, or 128
    on any launchbox Error.
    """
    # docopt fills in the literal "[default: $AWS_...]" strings from the
    # usage text; swap them for the real environment values. Use .get()
    # so a missing variable becomes None (surfacing later as a clean
    # credentials failure) instead of a bare KeyError here.
    if params['--access-key'] == '$AWS_ACCESS_KEY_ID':
        params['--access-key'] = os.environ.get('AWS_ACCESS_KEY_ID')
    if params['--secret-key'] == '$AWS_SECRET_ACCESS_KEY':
        params['--secret-key'] = os.environ.get('AWS_SECRET_ACCESS_KEY')
    try:
        sys.exit(launchbox(params))
    except Error as e:
        # %s on the exception itself (e.message is deprecated).
        log.error('ERROR: %s', e)
        sys.exit(128)
@@ -0,0 +1,141 @@
+import os
+from os.path import dirname
+import json
+
+import boto
+import requests
+
+from launchbox.osutils import mkdirp
+from launchbox.errors import *
+from launchbox.output import log
+
def urljoin(*args):
    """Join path components with exactly one '/' at each seam.

    Seams where both sides already carry a slash are collapsed to one;
    seams where neither side has one get a '/' inserted. Components are
    otherwise untouched (no leading/trailing normalization).
    """
    joined = args[0]
    for part in args[1:]:
        left_slash = joined.endswith('/')
        right_slash = part.startswith('/')
        if left_slash and right_slash:
            joined += part[1:]
        elif left_slash or right_slash:
            joined += part
        else:
            joined = joined + '/' + part
    return joined
+
class CookbookClient(object):
    """Caching facade over a cookbook remote.

    Every asset is downloaded at most once into *download_cache*; later
    calls are served from the cached files on disk.
    """

    def __init__(self, download_cache, delegate):
        """*delegate* must implement the methods specified in
        CookbookClientRemoteDelegate."""
        self.delegate = delegate
        self.download_cache = download_cache

    def _path(self, name, version, ext):
        # Cache layout: <cache>/<name>/<version>/<name><ext>
        return urljoin(self.download_cache, name, version, name + ext)

    def download_if_not_found(self, download_path, method, *args, **kw):
        """Invoke *method* to fill *download_path* unless the file is
        already cached. Returns *download_path*.

        On any failure the partial file is deleted so a broken download
        is never mistaken for a cached asset, and the error propagates.
        """
        if not os.path.isfile(download_path):
            mkdirp(dirname(download_path))
            # 'wb': cookbook tarballs are binary — text mode would
            # corrupt them on Windows and reject bytes on Python 3.
            with open(download_path, 'wb') as f:
                try:
                    method(f, *args, **kw)
                except Exception:
                    f.close()
                    os.remove(download_path)
                    # bare raise preserves the original traceback
                    raise
        return download_path

    def get_cookbook(self, name, version):
        """Return the filename from where the cookbook can be read as a
        .tar.gz."""
        path = self._path(name, version, '.tar.gz')
        return self.download_if_not_found(path, self.delegate.save_cookbook_to, name, version)

    def get_cookbook_metadata(self, name, version):
        """Return the parsed (JSON) cookbook metadata."""
        path = self._path(name, version, '.json')
        if self.download_if_not_found(path, self.delegate.save_cookbook_metadata_to, name, version):
            with open(path, 'r') as f:
                return json.load(f)

    def get_cookbook_versions(self, name):
        """Return the list of available versions for this cookbook."""
        path = urljoin(self.download_cache, name, 'versions.json')
        if self.download_if_not_found(path, self.delegate.save_cookbook_versions_to, name):
            with open(path, 'r') as f:
                return json.load(f)
+
+
class CookbookClientRemoteDelegate(object):
    """Informal interface documenting the methods a CookbookClient
    delegate must provide.

    The file object comes first in every signature, matching how
    CookbookClient.download_if_not_found invokes these methods (it
    passes the open destination file followed by name/version).
    """

    def save_cookbook_to(self, f, name, version):
        """Write the .tar.gz contents of cookbook *name*/*version* to *f*."""
        pass

    def save_cookbook_metadata_to(self, f, name, version):
        """Write the JSON metadata of cookbook *name*/*version* to *f*."""
        pass

    def save_cookbook_versions_to(self, f, name):
        """Write a JSON list of the available versions of *name* to *f*."""
        pass
+
+
class S3CookbookRemote(CookbookClientRemoteDelegate):
    """Fetches cookbooks from an S3 bucket, stored as
    cookbooks/<name>/<version>/<name>.(tar.gz|json)."""

    def __init__(self, access_key, secret_key, bucket_name):
        self.s3 = boto.connect_s3(access_key, secret_key)
        self.bucket = self.s3.get_bucket(bucket_name)

    def _key(self, name, version, ext):
        return urljoin('cookbooks', name, version, name + ext)

    def _fetch_key(self, key_name, f, error_cls, message):
        # Shared download path: a missing key becomes a domain error.
        key = self.bucket.get_key(key_name)
        if not key:
            raise error_cls(message)
        key.get_contents_to_file(f)

    def save_cookbook_to(self, f, name, version):
        self._fetch_key(self._key(name, version, '.tar.gz'), f,
                        CookbookTarballNotFoundError,
                        "Cannot find %s:%s" % (name, version))

    def save_cookbook_metadata_to(self, f, name, version):
        self._fetch_key(self._key(name, version, '.json'), f,
                        CookbookMetadataNotFoundError,
                        "no metadata for %s:%s" % (name, version))

    def save_cookbook_versions_to(self, f, name):
        """List every key under cookbooks/<name> and extract the
        versions found.

        This avoids needing a versions file in S3, which might be hard
        to update atomically."""
        found = set()
        for key in self.bucket.get_all_keys(prefix=urljoin('cookbooks', name)):
            # Key layout is cookbooks/<name>/<version>/...; field 2 is
            # the version component.
            found.add(key.name.split('/')[2])
        json.dump(sorted(found), f)
+
+
class HttpCookbookRemote(CookbookClientRemoteDelegate):
    """Fetches cookbooks from a generic HTTP(S) endpoint, laid out as
    <base_uri>/<name>/<version>/<name>.(tar.gz|json)."""

    def __init__(self, base_uri):
        self.base_uri = base_uri

    def _download(self, url, f):
        """Stream *url* into file object *f*.

        Raises RequestError on any unsuccessful response status.
        """
        # stream=True plus an explicit chunk size avoids buffering the
        # whole tarball in memory and byte-at-a-time iteration (old
        # requests defaults iter_content to 1-byte chunks).
        response = requests.get(url, stream=True)
        if not response.ok:
            # report the failing URL, not the destination file object
            raise RequestError("request for %s failed" % url)
        for chunk in response.iter_content(chunk_size=4096):
            f.write(chunk)

    def _url(self, name, version, ext):
        return urljoin(self.base_uri, name, version, name + ext)

    def save_cookbook_to(self, f, name, version):
        """Write the cookbook tarball for *name*/*version* to *f*."""
        self._download(self._url(name, version, '.tar.gz'), f)

    def save_cookbook_metadata_to(self, f, name, version):
        """Write the cookbook JSON metadata for *name*/*version* to *f*."""
        self._download(self._url(name, version, '.json'), f)

    def save_cookbook_versions_to(self, f, name):
        """Write the versions.json listing for *name* to *f*."""
        self._download(urljoin(self.base_uri, name, 'versions.json'), f)
Oops, something went wrong.

0 comments on commit 24d85fc

Please sign in to comment.