diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000000..55ad5f3c9c --- /dev/null +++ b/.coveragerc @@ -0,0 +1,5 @@ +[run] +omit = + */venv/* + */virtualenv/* +ignore_errors = True diff --git a/.gitignore b/.gitignore index cf845cda37..6e998e290c 100644 --- a/.gitignore +++ b/.gitignore @@ -39,8 +39,11 @@ nosetests.xml # Deis' config file deis/local_settings.py + +# Misc. +.DS_Store +htmlcov/ +.ruby-version venv/ .vagrant -.ruby-version -.DS_Store diff --git a/.travis.yml b/.travis.yml index 6d9a6bdaab..7eced6a41d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,6 +19,7 @@ before_install: install: - "pip install -r requirements.txt --use-mirrors" + - "pip install coveralls --use-mirrors" before_script: - "psql -c 'create database deis_testing;' -U postgres" @@ -34,7 +35,11 @@ before_script: 'PORT': '', } } - SECRET_KEY = ')9$y9(@_r!+ai=ub)wsp2!!vs4i67x7ke9!jaljgf1_@rw421=' + SECRET_KEY = ')9$y9(@_r!+ai=ub)wsp2!!vsfaker7ke9!jaljgf1_@rw421=' EOF -script: "make test" +script: + - coverage run manage.py test api celerytasks web + +after_success: + - coveralls diff --git a/Makefile b/Makefile index bb47cfc5bb..49008172c8 100644 --- a/Makefile +++ b/Makefile @@ -3,15 +3,14 @@ all: db: python manage.py syncdb --noinput + python manage.py migrate test: - python manage.py test api web + python manage.py test api celerytasks web -task: - python manage.py test celerytasks +coverage: + coverage run manage.py test api celerytasks web + coverage html -pep8: - pep8 api celerytasks deis web - -pyflakes: - pyflakes api celerytasks deis web +flake8: + flake8 diff --git a/README.md b/README.md index a303dcb163..f5f98249ee 100644 --- a/README.md +++ b/README.md @@ -6,3 +6,4 @@ Your PaaS. Your Rules. Current status: [![Build Status](https://travis-ci.org/opdemand/deis.png)](https://travis-ci.org/opdemand/deis) +[![Coverage Status](https://coveralls.io/repos/opdemand/deis/badge.png?branch=master)](https://coveralls.io/r/opdemand/deis?branch=master) diff --git a/api/fields.py b/api/fields.py index c0558af8df..092fe8ff4b 100644 --- a/api/fields.py +++ b/api/fields.py @@ -88,6 +88,8 @@ class ParamsField(JSONField): A text field that accepts a JSON object, used for storing provider API Parameters. """ + pass + class CloudInitField(YAMLField): """ @@ -107,7 +109,7 @@ class NodeStatusField(JSONField): try: from south.modelsinspector import add_introspection_rules - # Tell the South schema migration tool to handle a UuidField. + # Tell the South schema migration tool to handle our custom fields. 
add_introspection_rules([], [r'^api\.fields\.UuidField']) add_introspection_rules([], [r'^api\.fields\.EnvVarsField']) add_introspection_rules([], [r'^api\.fields\.DataBagField']) diff --git a/api/migrations/0001_initial.py b/api/migrations/0001_initial.py index 4e05ae160f..f5c89b4703 100644 --- a/api/migrations/0001_initial.py +++ b/api/migrations/0001_initial.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- + +# flake8: noqa + import datetime from south.db import db from south.v2 import SchemaMigration @@ -459,4 +462,4 @@ def backwards(self, orm): } } - complete_apps = ['api'] \ No newline at end of file + complete_apps = ['api'] diff --git a/api/migrations/0002_auto__del_field_flavor_ssh_username__del_field_flavor_ssh_private_key_.py b/api/migrations/0002_auto__del_field_flavor_ssh_username__del_field_flavor_ssh_private_key_.py index b0eb0ae317..a768781851 100644 --- a/api/migrations/0002_auto__del_field_flavor_ssh_username__del_field_flavor_ssh_private_key_.py +++ b/api/migrations/0002_auto__del_field_flavor_ssh_username__del_field_flavor_ssh_private_key_.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- + +# flake8: noqa + import datetime from south.db import db from south.v2 import SchemaMigration @@ -248,4 +251,4 @@ def backwards(self, orm): } } - complete_apps = ['api'] \ No newline at end of file + complete_apps = ['api'] diff --git a/api/models.py b/api/models.py index 28e931a1fc..19282bb277 100644 --- a/api/models.py +++ b/api/models.py @@ -9,7 +9,10 @@ from __future__ import unicode_literals import importlib import json +import os +import yaml +from celery.canvas import group from django.conf import settings from django.db import models from django.db.models.signals import post_save @@ -19,18 +22,16 @@ from rest_framework.authtoken.models import Token from api import fields -from celerytasks import chef, controller -from celery.canvas import group -import yaml -import os.path +from celerytasks import controller # define custom signals scale_signal = Signal(providing_args=['formation', 'user']) release_signal = Signal(providing_args=['formation', 'user']) + def import_tasks(provider_type): - "Return Celery tasks for a given provider type" + """Return Celery tasks for a given provider type""" try: tasks = importlib.import_module('celerytasks.'+provider_type) except ImportError as e: @@ -199,15 +200,16 @@ def publish(self, **kwargs): 'ssh_keys': {}, 'admins': [], 'formations': {} - } + } # add all ssh keys on the system for key in Key.objects.all(): - key_id = '{0}_{1}'.format(key.owner.username, key.id) + key_id = "{0}_{1}".format(key.owner.username, key.id) databag['ssh_keys'][key_id] = key.public # TODO: add sharing-based key lookup, for now just owner's keys for formation in formations: keys = databag['formations'][formation.id] = [] - owner_keys = [ '{0}_{1}'.format(k.owner.username, k.id) for k in formation.owner.key_set.all() ] + owner_keys = ["{0}_{1}".format( + k.owner.username, k.id) for k in formation.owner.key_set.all()] keys.extend(owner_keys) # call a celery task to update gitosis if settings.CHEF_ENABLED: @@ -243,7 +245,7 @@ class Formation(UuidAuditedModel): id = models.SlugField(max_length=64) layers = fields.JSONField(default='{}', blank=True) containers = fields.JSONField(default='{}', blank=True) - + class Meta: unique_together = (('owner', 'id'),) @@ -325,7 +327,7 @@ def balance(self, **kwargs): if containers_balanced: self.converge(databag) return databag - + def _balance_containers(self, **kwargs): runtime_nodes = 
self.node_set.filter(layer__id='runtime').order_by('created') if len(runtime_nodes) < 2: @@ -359,7 +361,7 @@ def _balance_containers(self, **kwargs): type=container_type, num=container_num, node=n_under) - container_num +=1 + container_num += 1 # delete the oldest container from the most over-utilized node c = n_over.container_set.filter(type=container_type).order_by('created')[0] c.delete() @@ -369,10 +371,16 @@ def _balance_containers(self, **kwargs): n_map.setdefault(ct, []).append(n) changed = True return changed - + def __str__(self): return self.id - + + def prepare_provider(self, *args, **kwargs): + tasks = import_tasks(self.flavor.provider.type) + args = (self.id, self.flavor.provider.creds.copy(), + self.flavor.params.copy()) + return tasks.prepare_formation.subtask(args) + def calculate(self): "Return a Chef data bag item for this formation" release = self.release_set.all().order_by('-created')[0] @@ -388,7 +396,7 @@ def calculate(self): d['release']['build']['procfile'] = release.build.procfile # calculate proxy d['proxy'] = {} - d['proxy']['algorithm'] = 'round_robin' + d['proxy']['algorithm'] = 'round_robin' d['proxy']['port'] = 80 d['proxy']['backends'] = [] # calculate container formation @@ -397,7 +405,8 @@ def calculate(self): # all container types get an exposed port starting at 5001 port = 5000 + c.num d['containers'].setdefault(c.type, {}) - d['containers'][c.type].update({ c.num: "{0}:{1}".format(c.node.id, port) }) + d['containers'][c.type].update( + {c.num: "{0}:{1}".format(c.node.id, port)}) # only proxy to 'web' containers if c.type == 'web': d['proxy']['backends'].append("{0}:{1}".format(c.node.fqdn, port)) @@ -628,7 +637,7 @@ class Config(UuidAuditedModel): owner = models.ForeignKey(settings.AUTH_USER_MODEL) formation = models.ForeignKey('Formation') version = models.PositiveIntegerField(default=1) - + values = fields.EnvVarsField(default='{}', blank=True) class Meta: @@ -649,7 +658,7 @@ class Build(UuidAuditedModel): owner = models.ForeignKey(settings.AUTH_USER_MODEL) formation = models.ForeignKey('Formation') version = models.PositiveIntegerField(default=1) - + sha = models.CharField('SHA', max_length=255, blank=True) output = models.TextField(blank=True) @@ -678,7 +687,7 @@ class Release(UuidAuditedModel): owner = models.ForeignKey(settings.AUTH_USER_MODEL) formation = models.ForeignKey('Formation') version = models.PositiveIntegerField(default=1) - + config = models.ForeignKey('Config') image = models.CharField(max_length=256, default='ubuntu') # build only required for heroku-style apps @@ -689,7 +698,7 @@ class Meta: def __str__(self): return '{0}-v{1}'.format(self.formation.id, self.version) - + def rollback(self): # create a rollback log entry # call run @@ -700,7 +709,7 @@ def rollback(self): def new_release(sender, **kwargs): formation, user = kwargs['formation'], kwargs['user'] last_release = Release.objects.filter( - formation=formation).order_by('-created')[0] + formation=formation).order_by('-created')[0] image = kwargs.get('image', last_release.image) config = kwargs.get('config', last_release.config) build = kwargs.get('build', last_release.build) @@ -776,4 +785,3 @@ def create_auth_token(sender, instance=None, created=False, **kwargs): if created: # pylint: disable=E1101 Token.objects.create(user=instance) - diff --git a/api/serializers.py b/api/serializers.py index bb37f764dc..e8fdea8990 100644 --- a/api/serializers.py +++ b/api/serializers.py @@ -5,13 +5,11 @@ from __future__ import unicode_literals +from django.contrib.auth.models import User 
from rest_framework import serializers from api import models, utils -from django.contrib.auth.models import User - - class UserSerializer(serializers.ModelSerializer): @@ -20,8 +18,8 @@ class UserSerializer(serializers.ModelSerializer): class Meta: """Metadata options for a UserSerializer.""" model = User - read_only_fields = ('is_superuser', 'is_staff', 'groups', 'user_permissions', - 'last_login', 'date_joined') + read_only_fields = ('is_superuser', 'is_staff', 'groups', + 'user_permissions', 'last_login', 'date_joined') @property def data(self): @@ -63,7 +61,7 @@ class FlavorSerializer(serializers.ModelSerializer): owner = serializers.Field(source='owner.username') provider = serializers.SlugRelatedField(slug_field='id') - + class Meta: """Metadata options for a FlavorSerializer.""" model = models.Flavor @@ -76,9 +74,9 @@ class ConfigSerializer(serializers.ModelSerializer): owner = serializers.Field(source='owner.username') formation = serializers.SlugRelatedField(slug_field='id') - values = serializers.ModelField(model_field=models.Config()._meta.get_field('values'), - required=False) - + values = serializers.ModelField( + model_field=models.Config()._meta.get_field('values'), required=False) + class Meta: """Metadata options for a ConfigSerializer.""" model = models.Config @@ -91,7 +89,7 @@ class BuildSerializer(serializers.ModelSerializer): owner = serializers.Field(source='owner.username') formation = serializers.SlugRelatedField(slug_field='id') - + class Meta: """Metadata options for a BuildSerializer.""" model = models.Build @@ -106,7 +104,7 @@ class ReleaseSerializer(serializers.ModelSerializer): formation = serializers.SlugRelatedField(slug_field='id') config = serializers.SlugRelatedField(slug_field='uuid') build = serializers.SlugRelatedField(slug_field='uuid', required=False) - + class Meta: """Metadata options for a ReleaseSerializer.""" model = models.Release @@ -119,7 +117,7 @@ class FormationSerializer(serializers.ModelSerializer): owner = serializers.Field(source='owner.username') id = serializers.SlugField(default=utils.generate_app_name) - + class Meta: """Metadata options for a FormationSerializer.""" model = models.Formation @@ -156,7 +154,7 @@ class NodeSerializer(serializers.ModelSerializer): owner = serializers.Field(source='owner.username') formation = serializers.SlugRelatedField(slug_field='id') layer = serializers.SlugRelatedField(slug_field='id') - + class Meta: """Metadata options for a NodeSerializer.""" model = models.Node @@ -170,7 +168,7 @@ class ContainerSerializer(serializers.ModelSerializer): owner = serializers.Field(source='owner.username') formation = serializers.SlugRelatedField(slug_field='id') node = serializers.SlugRelatedField(slug_field='uuid') - + class Meta: """Metadata options for a ContainerSerializer.""" model = models.Container diff --git a/api/tests/__init__.py b/api/tests/__init__.py index 22d6647149..858d32c260 100644 --- a/api/tests/__init__.py +++ b/api/tests/__init__.py @@ -27,14 +27,14 @@ def send_patch(self, path, data='', content_type='application/octet-stream', RequestFactory.patch = construct_patch Client.patch = send_patch -from .auth import * -from .build import * -from .config import * -from .container import * -from .flavor import * -from .formation import * -from .key import * -from .layer import * -from .node import * -from .provider import * -from .release import * +from .auth import * # noqa +from .build import * # noqa +from .config import * # noqa +from .container import * # noqa +from .flavor import * # noqa 
+from .formation import * # noqa +from .key import * # noqa +from .layer import * # noqa +from .node import * # noqa +from .provider import * # noqa +from .release import * # noqa diff --git a/api/tests/auth.py b/api/tests/auth.py index 999836394a..eefbc8a602 100644 --- a/api/tests/auth.py +++ b/api/tests/auth.py @@ -30,11 +30,16 @@ def test_auth(self): username, password = 'newuser', 'password' first_name, last_name = 'Otto', 'Test' email = 'autotest@deis.io' - submit = {'username': username, 'password': password, - 'first_name': first_name, 'last_name': last_name, - 'email': email, - # try to abuse superuser/staff level perms - 'is_superuser': True, 'is_staff': True} + submit = { + 'username': username, + 'password': password, + 'first_name': first_name, + 'last_name': last_name, + 'email': email, + # try to abuse superuser/staff level perms + 'is_superuser': True, + 'is_staff': True, + } url = '/api/auth/register' response = self.client.post(url, json.dumps(submit), content_type='application/json') self.assertEqual(response.status_code, 201) @@ -55,4 +60,4 @@ def test_auth(self): url = '/api/flavors' response = self.client.get(url) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['count'], 8) # 8 regions + self.assertEqual(response.data['count'], 8) # 8 regions \ No newline at end of file diff --git a/api/tests/build.py b/api/tests/build.py index ccde51fbfa..232b59d61b 100644 --- a/api/tests/build.py +++ b/api/tests/build.py @@ -29,11 +29,14 @@ def setUp(self): response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) url = '/api/flavors' - body = {'id': 'autotest', 'provider': 'autotest', - 'params': json.dumps({'region': 'us-west-2', 'instance_size': 'm1.medium'})} + body = { + 'id': 'autotest', + 'provider': 'autotest', + 'params': json.dumps({'region': 'us-west-2', 'instance_size': 'm1.medium'}), + } response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) - + def test_build(self): """ Test that a null build is created on a new formation, and that users @@ -49,9 +52,13 @@ def test_build(self): response = self.client.get(url) self.assertEqual(response.status_code, 404) # post a first build - body = {'sha': uuid.uuid4().hex, 'slug_size': 4096000, 'procfile': json.dumps({'web': 'node server.js'}), - 'url': 'http://deis.local/slugs/1c52739bbf3a44d3bfb9a58f7bbdd5fb.tar.gz', - 'checksum': uuid.uuid4().hex} + body = { + 'sha': uuid.uuid4().hex, + 'slug_size': 4096000, + 'procfile': json.dumps({'web': 'node server.js'}), + 'url': 'http://deis.local/slugs/1c52739bbf3a44d3bfb9a58f7bbdd5fb.tar.gz', + 'checksum': uuid.uuid4().hex, + } response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) build1 = response.data @@ -63,9 +70,13 @@ def test_build(self): build2 = response.data self.assertEqual(build1, build2) # post a new build - body = {'sha': uuid.uuid4().hex, 'slug_size': 4096000, 'procfile': json.dumps({'web': 'node server.js'}), - 'url': 'http://deis.local/slugs/1c52739bbf3a44d3bfb9a58f7bbdd5fb.tar.gz', - 'checksum': uuid.uuid4().hex} + body = { + 'sha': uuid.uuid4().hex, + 'slug_size': 4096000, + 'procfile': json.dumps({'web': 'node server.js'}), + 'url': 'http://deis.local/slugs/1c52739bbf3a44d3bfb9a58f7bbdd5fb.tar.gz', + 'checksum': uuid.uuid4().hex, + } response = self.client.post(url, json.dumps(body), content_type='application/json') 
self.assertEqual(response.status_code, 201) build3 = response.data @@ -76,4 +87,3 @@ def test_build(self): self.assertEqual(self.client.put(url).status_code, 405) self.assertEqual(self.client.patch(url).status_code, 405) self.assertEqual(self.client.delete(url).status_code, 405) - diff --git a/api/tests/config.py b/api/tests/config.py index e180192dcc..02728c484c 100644 --- a/api/tests/config.py +++ b/api/tests/config.py @@ -33,7 +33,7 @@ def setUp(self): def test_config(self): """ - Test that config is auto-created during a new formation and that + Test that config is auto-created during a new formation and that new versions can be created using a PATCH """ url = '/api/formations' @@ -92,6 +92,3 @@ def test_config(self): self.assertEqual(self.client.put(url).status_code, 405) self.assertEqual(self.client.patch(url).status_code, 405) self.assertEqual(self.client.delete(url).status_code, 405) - - - \ No newline at end of file diff --git a/api/tests/container.py b/api/tests/container.py index 4894021d6e..8b1e4e0656 100644 --- a/api/tests/container.py +++ b/api/tests/container.py @@ -33,7 +33,7 @@ def setUp(self): 'params': json.dumps({'region': 'us-west-2', 'instance_size': 'm1.medium'})} response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) - + def test_container_scale(self): url = '/api/formations' body = {'id': 'autotest'} @@ -101,7 +101,7 @@ def test_container_balance(self): response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 200) # should start with zero - url = '/api/formations/{formation_id}/containers'.format(**locals()) + url = "/api/formations/{formation_id}/containers".format(**locals()) response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data['results']), 0) @@ -116,7 +116,7 @@ def test_container_balance(self): response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 200) # calculate the formation - url = '/api/formations/{formation_id}/calculate'.format(**locals()) + url = "/api/formations/{formation_id}/calculate".format(**locals()) response = self.client.post(url) containers = response.data['containers'] # check balance of web types @@ -124,16 +124,16 @@ def test_container_balance(self): for c in containers['web'].values(): backend, port = c.split(':') by_backend.setdefault(backend, []).append(port) - b_min = min([ len(by_backend[b]) for b in by_backend.keys() ]) - b_max = max([ len(by_backend[b]) for b in by_backend.keys() ]) + b_min = min([len(by_backend[b]) for b in by_backend.keys()]) + b_max = max([len(by_backend[b]) for b in by_backend.keys()]) self.assertLess(b_max - b_min, 2) # check balance of worker types by_backend = {} for c in containers['worker'].values(): backend, port = c.split(':') by_backend.setdefault(backend, []).append(port) - b_min = min([ len(by_backend[b]) for b in by_backend.keys() ]) - b_max = max([ len(by_backend[b]) for b in by_backend.keys() ]) + b_min = min([len(by_backend[b]) for b in by_backend.keys()]) + b_max = max([len(by_backend[b]) for b in by_backend.keys()]) self.assertLess(b_max - b_min, 2) # scale up more url = '/api/formations/{formation_id}/scale/containers'.format(**locals()) @@ -141,7 +141,7 @@ def test_container_balance(self): response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 200) # calculate the 
formation - url = '/api/formations/{formation_id}/calculate'.format(**locals()) + url = "/api/formations/{formation_id}/calculate".format(**locals()) response = self.client.post(url) containers = response.data['containers'] # check balance of web types @@ -149,28 +149,28 @@ def test_container_balance(self): for c in containers['web'].values(): backend, port = c.split(':') by_backend.setdefault(backend, []).append(port) - b_min = min([ len(by_backend[b]) for b in by_backend.keys() ]) - b_max = max([ len(by_backend[b]) for b in by_backend.keys() ]) + b_min = min([len(by_backend[b]) for b in by_backend.keys()]) + b_max = max([len(by_backend[b]) for b in by_backend.keys()]) self.assertLess(b_max - b_min, 2) # check balance of worker types by_backend = {} for c in containers['worker'].values(): backend, port = c.split(':') by_backend.setdefault(backend, []).append(port) - b_min = min([ len(by_backend[b]) for b in by_backend.keys() ]) - b_max = max([ len(by_backend[b]) for b in by_backend.keys() ]) + b_min = min([len(by_backend[b]) for b in by_backend.keys()]) + b_max = max([len(by_backend[b]) for b in by_backend.keys()]) self.assertLess(b_max - b_min, 2) # scale down url = '/api/formations/{formation_id}/scale/containers'.format(**locals()) body = {'web': 2, 'worker': 2} response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 200) - url = '/api/formations/{formation_id}/containers'.format(**locals()) + url = "/api/formations/{formation_id}/containers".format(**locals()) response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data['results']), 4) # calculate the formation - url = '/api/formations/{formation_id}/calculate'.format(**locals()) + url = "/api/formations/{formation_id}/calculate".format(**locals()) response = self.client.post(url) containers = response.data['containers'] # check balance of web types @@ -178,14 +178,14 @@ def test_container_balance(self): for c in containers['web'].values(): backend, port = c.split(':') by_backend.setdefault(backend, []).append(port) - b_min = min([ len(by_backend[b]) for b in by_backend.keys() ]) - b_max = max([ len(by_backend[b]) for b in by_backend.keys() ]) + b_min = min([len(by_backend[b]) for b in by_backend.keys()]) + b_max = max([len(by_backend[b]) for b in by_backend.keys()]) self.assertLess(b_max - b_min, 2) # check balance of worker types by_backend = {} for c in containers['worker'].values(): backend, port = c.split(':') by_backend.setdefault(backend, []).append(port) - b_min = min([ len(by_backend[b]) for b in by_backend.keys() ]) - b_max = max([ len(by_backend[b]) for b in by_backend.keys() ]) + b_min = min([len(by_backend[b]) for b in by_backend.keys()]) + b_max = max([len(by_backend[b]) for b in by_backend.keys()]) self.assertLess(b_max - b_min, 2) diff --git a/api/tests/flavor.py b/api/tests/flavor.py index 1240011127..3c9ee4770d 100644 --- a/api/tests/flavor.py +++ b/api/tests/flavor.py @@ -26,7 +26,7 @@ def setUp(self): body = {'id': 'autotest', 'type': 'mock', 'creds': json.dumps(creds)} response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) - + def test_flavor(self): """ Test that a user can create, read, update and delete a node flavor @@ -40,15 +40,13 @@ def test_flavor(self): response = self.client.get('/api/flavors') self.assertEqual(response.status_code, 200) self.assertEqual(len(response.data['results']), 1) - url = 
'/api/flavors/{flavor_id}'.format(**locals()) + url = "/api/flavors/{flavor_id}".format(**locals()) response = self.client.get(url) self.assertEqual(response.status_code, 200) new_init = {'ssh_authorized_keys': ['ssh-rsa aaaaaaaaa']} - body = {'init': yaml.safe_dump(new_init) } + body = {'init': yaml.safe_dump(new_init)} response = self.client.patch(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 200) self.assertEqual(yaml.safe_load(response.data['init']), new_init) response = self.client.delete(url) self.assertEqual(response.status_code, 204) - - diff --git a/api/tests/formation.py b/api/tests/formation.py index 49c40d2e2d..dd73eef9da 100644 --- a/api/tests/formation.py +++ b/api/tests/formation.py @@ -30,7 +30,7 @@ def setUp(self): 'params': json.dumps({'region': 'us-west-2', 'instance_size': 'm1.medium'})} response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) - + def test_formation(self): """ Test that a user can create, read, update and delete a node formation @@ -53,7 +53,7 @@ def test_formation(self): self.assertEqual(response.status_code, 405) response = self.client.delete(url) self.assertEqual(response.status_code, 204) - + def test_formation_auto_id(self): body = {'id': 'autotest'} response = self.client.post('/api/formations', json.dumps(body), content_type='application/json') @@ -94,7 +94,7 @@ def test_formation_scale_errors(self): response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 400) self.assertEqual(json.loads(response.content), 'Must scale runtime nodes > 0 to host containers') - + def test_formation_actions(self): url = '/api/formations' body = {'id': 'autotest'} @@ -124,4 +124,4 @@ def test_formation_actions(self): self.assertIn('nodes', response.data) self.assertIn('containers', response.data) self.assertIn('proxy', response.data) - self.assertIn('release', response.data) \ No newline at end of file + self.assertIn('release', response.data) diff --git a/api/tests/key.py b/api/tests/key.py index 51889201e3..be110d56a8 100644 --- a/api/tests/key.py +++ b/api/tests/key.py @@ -40,5 +40,3 @@ def test_key(self): self.assertEqual(body['public'], response.data['public']) response = self.client.delete(url) self.assertEqual(response.status_code, 204) - - diff --git a/api/tests/node.py b/api/tests/node.py index 19335b2166..bb8060d949 100644 --- a/api/tests/node.py +++ b/api/tests/node.py @@ -30,7 +30,7 @@ def setUp(self): 'params': json.dumps({'region': 'us-west-2', 'instance_size': 'm1.medium'})} response = self.client.post(url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) - + def test_node(self): """ Test that a user can create, read, update and delete a node @@ -64,4 +64,4 @@ def test_node(self): self.assertEqual(response.status_code, 200) self.assertIn('fqdn', response.data) response = self.client.delete(url) - self.assertEqual(response.status_code, 204) \ No newline at end of file + self.assertEqual(response.status_code, 204) diff --git a/api/tests/provider.py b/api/tests/provider.py index 40f1059f99..cfc9fa3686 100644 --- a/api/tests/provider.py +++ b/api/tests/provider.py @@ -46,5 +46,3 @@ def test_provider(self): self.assertEqual(response.data['type'], 'ec2') response = self.client.delete(url) self.assertEqual(response.status_code, 204) - - diff --git a/api/tests/release.py b/api/tests/release.py index afbe422ec0..73bc012687 100644 --- 
a/api/tests/release.py +++ b/api/tests/release.py @@ -24,14 +24,22 @@ def setUp(self): url = '/api/providers' creds = {'secret_key': 'x'*64, 'access_key': 1*20} body = {'id': 'autotest', 'type': 'mock', 'creds': json.dumps(creds)} - response = self.client.post(url, json.dumps(body), content_type='application/json') + response = self.client.post( + url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) url = '/api/flavors' - body = {'id': 'autotest', 'provider': 'autotest', - 'params': json.dumps({'region': 'us-west-2', 'instance_size': 'm1.medium'})} - response = self.client.post(url, json.dumps(body), content_type='application/json') + body = { + 'id': 'autotest', + 'provider': 'autotest', + 'params': json.dumps({ + 'region': 'us-west-2', + 'instance_size': 'm1.medium' + }) + } + response = self.client.post( + url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) - + def test_release(self): """ Test that a release is created when a formation is created, and @@ -54,7 +62,8 @@ def test_release(self): # check that updating config rolls a new release url = '/api/formations/{formation_id}/config'.format(**locals()) body = {'values': json.dumps({'NEW_URL1': 'http://localhost:8080/'})} - response = self.client.post(url, json.dumps(body), content_type='application/json') + response = self.client.post( + url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) self.assertIn('NEW_URL1', json.loads(response.data['values'])) # check to see that a new release was created @@ -70,10 +79,16 @@ def test_release(self): # check that updating the build rolls a new release url = '/api/formations/{formation_id}/build'.format(**locals()) build_config = json.dumps({'PATH': 'bin:/usr/local/bin:/usr/bin:/bin'}) - body = {'sha': uuid.uuid4().hex, 'slug_size': 4096000, 'procfile': json.dumps({'web': 'node server.js'}), - 'url': 'http://deis.local/slugs/1c52739bbf3a44d3bfb9a58f7bbdd5fb.tar.gz', - 'checksum': uuid.uuid4().hex, 'config': build_config} - response = self.client.post(url, json.dumps(body), content_type='application/json') + body = { + 'sha': uuid.uuid4().hex, + 'slug_size': 4096000, + 'procfile': json.dumps({'web': 'node server.js'}), + 'url': + 'http://deis.local/slugs/1c52739bbf3a44d3bfb9a58f7bbdd5fb.tar.gz', + 'checksum': uuid.uuid4().hex, 'config': build_config, + } + response = self.client.post( + url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) self.assertEqual(response.data['url'], body['url']) # check to see that a new release was created @@ -93,11 +108,13 @@ def test_release(self): config3_values = json.loads(config3['values']) self.assertIn('NEW_URL1', config3_values) self.assertIn('PATH', config3_values) - self.assertEqual(config3_values['PATH'], 'bin:/usr/local/bin:/usr/bin:/bin') + self.assertEqual( + config3_values['PATH'], 'bin:/usr/local/bin:/usr/bin:/bin') # check that updating the image rolls a new release url = '/api/formations/{formation_id}/image'.format(**locals()) - body = {'image': 'deis/autotest2'} - response = self.client.post(url, json.dumps(body), content_type='application/json') + body = {'image': 'deis/autotest2'} + response = self.client.post( + url, json.dumps(body), content_type='application/json') self.assertEqual(response.status_code, 201) # check to see that a new release was created url = '/api/formations/{formation_id}/release'.format(**locals()) diff --git a/api/urls.py b/api/urls.py 
index 46ee3bbbda..2d76322faa 100644 --- a/api/urls.py +++ b/api/urls.py @@ -77,7 +77,7 @@ views.FormationViewSet.as_view({'get': 'retrieve', 'delete': 'destroy'})), url(r'^formations/?', views.FormationViewSet.as_view({'post': 'create', 'get': 'list'})), - + # authn / authz url(r'^auth/register/?', views.UserRegistrationView.as_view({'post': 'create'})), diff --git a/api/views.py b/api/views.py index a4246addb9..afda394b5a 100644 --- a/api/views.py +++ b/api/views.py @@ -4,19 +4,23 @@ # pylint: disable=R0901,R0904 from __future__ import unicode_literals +import json + from Crypto.PublicKey import RSA -from api import models, serializers from django.conf import settings from django.contrib.auth.models import Group, AnonymousUser, User from django.db.utils import IntegrityError +from django.http.response import Http404 from django.utils import timezone from rest_framework import permissions, status, viewsets from rest_framework.authentication import BaseAuthentication +from rest_framework.generics import get_object_or_404 from rest_framework.response import Response from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_201_CREATED -import json -from rest_framework.generics import get_object_or_404 -from django.http.response import Http404 + +from api import models +from api import serializers + class AnonymousAuthentication(BaseAuthentication): @@ -151,10 +155,10 @@ def create(self, request, **kwargs): request._data = request.DATA.copy() try: return OwnerViewSet.create(self, request, **kwargs) - except IntegrityError as _e: - return Response("Formation with this Id already exists.", + except IntegrityError: + return Response('Formation with this Id already exists.', status=HTTP_400_BAD_REQUEST) - + def post_save(self, formation, created=False, **kwargs): if created: config = models.Config.objects.create( @@ -203,7 +207,7 @@ def balance(self, request, **kwargs): databag = formation.balance() return Response(databag, status=status.HTTP_200_OK, content_type='application/json') - + def calculate(self, request, **kwargs): formation = self.get_object() databag = formation.calculate() @@ -280,7 +284,7 @@ def get_queryset(self, **kwargs): owner=self.request.user, id=self.kwargs['id']) return self.model.objects.filter(owner=self.request.user, formation=formation) - + def get_object(self, *args, **kwargs): qs = self.get_queryset(**kwargs) obj = get_object_or_404(qs, id=self.kwargs['node']) @@ -291,7 +295,7 @@ class FormationContainerViewSet(OwnerViewSet): model = models.Container serializer_class = serializers.ContainerSerializer - + def get_queryset(self, **kwargs): formation = models.Formation.objects.get( owner=self.request.user, id=self.kwargs['id']) @@ -322,8 +326,8 @@ def get_object(self, *args, **kwargs): def reset_image(self, request, *args, **kwargs): formation = models.Formation.objects.get( - owner=self.request.user, id=self.kwargs['id']) - models.release_signal.send(sender=self, image=request.DATA['image'], + owner=self.request.user, id=self.kwargs['id']) + models.release_signal.send(sender=self, image=request.DATA['image'], formation=formation, user=self.request.user) return Response(status=HTTP_201_CREATED) @@ -342,13 +346,14 @@ def get_queryset(self, **kwargs): def get_object(self, *args, **kwargs): formation = models.Formation.objects.get(id=self.kwargs['id']) config = self.model.objects.filter( - formation=formation).order_by('-created')[0] + formation=formation).order_by('-created')[0] return config def post_save(self, obj, created=False): if created: - 
models.release_signal.send(sender=self, - config=obj, formation=obj.formation, user=self.request.user) + models.release_signal.send( + sender=self, config=obj, formation=obj.formation, + user=self.request.user) # recalculate and converge after each config update databag = obj.formation.calculate() obj.formation.converge(databag) @@ -365,7 +370,7 @@ def create(self, request, *args, **kwargs): provided = json.loads(request.DATA['values']) values.update(provided) # remove config keys if we provided a null value - [ values.pop(k) for k, v in provided.items() if v is None ] + [values.pop(k) for k, v in provided.items() if v is None] request.DATA['values'] = values return super(OwnerViewSet, self).create(request, *args, **kwargs) @@ -388,8 +393,9 @@ def get_object(self, *args, **kwargs): def post_save(self, obj, created=False): if created: - models.release_signal.send(sender=self, - build=obj, formation=obj.formation, user=self.request.user) + models.release_signal.send( + sender=self, build=obj, formation=obj.formation, + user=self.request.user) def create(self, request, *args, **kwargs): request._data = request.DATA.copy() @@ -414,9 +420,9 @@ def get_queryset(self, **kwargs): owner=self.request.user, id=self.kwargs['id']) return self.model.objects.filter(owner=self.request.user, formation=formation) - + def get_object(self, *args, **kwargs): formation = models.Formation.objects.get(id=self.kwargs['id']) release = self.model.objects.filter( - formation=formation).order_by('-created')[0] + formation=formation).order_by('-created')[0] return release diff --git a/celerytasks/__init__.py b/celerytasks/__init__.py index 8b13789179..e69de29bb2 100644 --- a/celerytasks/__init__.py +++ b/celerytasks/__init__.py @@ -1 +0,0 @@ - diff --git a/celerytasks/azuresms.py b/celerytasks/azuresms.py index 3d957f252f..f75197c2cf 100644 --- a/celerytasks/azuresms.py +++ b/celerytasks/azuresms.py @@ -1,39 +1,49 @@ from __future__ import unicode_literals -import json -import time from azure.servicemanagement import ServiceManagementService from azure.servicemanagement import LinuxConfigurationSet, OSVirtualHardDisk - from celery import task import yaml from . 
import util -from api.models import Node -from deis import settings -from celerytasks.chef import ChefAPI + @task(name='azuresms.launch_node') def launch_node(node_id, creds, params, init, ssh_username, ssh_private_key): # install this manually in your virtual env https://github.com/WindowsAzure/azure-sdk-for-python - # http://scottdensmore.typepad.com/blog/2012/01/creating-a-ssl-certificate-for-the-cloud-ready-packages-for-the-ios-windows-azure-toolkit.html + # "pip install azure" + # http://scottdensmore.typepad.com/blog/2012/01/ + # creating-a-ssl-certificate-for-the-cloud-ready-packages-for-the-ios-windows-azure-toolkit.html # I got all these weird "random" errors that didnt effect anything - sms = ServiceManagementService(subscription_id="69581868-8a08-4d98-a5b0-1d111c616fc3", cert_file="/Users/dgriffin/certs/iOSWAToolkit.pem") + sms = ServiceManagementService( + subscription_id='69581868-8a08-4d98-a5b0-1d111c616fc3', + cert_file='/Users/dgriffin/certs/iOSWAToolkit.pem') for i in sms.list_os_images(): - print "I is ", i.name, " -- ", i.label, " -- ", i.location, " -- ", i.media_link - media_link = "http://opdemandstorage.blob.core.windows.net/communityimages/b39f27a8b8c64d52b05eac6a62ebad85__Ubuntu_DAILY_BUILD-precise-12_04_2-LTS-amd64-server-20130702-en-us-30GB.vhd" + print 'I is ', i.name, ' -- ', i.label, ' -- ', i.location, ' -- ', i.media_link + media_link = \ + 'http://opdemandstorage.blob.core.windows.net/communityimages/' + \ + 'b39f27a8b8c64d52b05eac6a62ebad85__Ubuntu_DAILY_BUILD-' + \ + 'precise-12_04_2-LTS-amd64-server-20130702-en-us-30GB.vhd' config = LinuxConfigurationSet(user_name="ubuntu", user_password="opdemand") - hard_disk = OSVirtualHardDisk("b39f27a8b8c64d52b05eac6a62ebad85__Ubuntu_DAILY_BUILD-precise-12_04_2-LTS-amd64-server-20130702-en-us-30GB", media_link, disk_label = "opdemandservice") - ret = sms.create_virtual_machine_deployment("opdemandservice", "deploy1", "production", "opdemandservice2", "opdemandservice3", config, hard_disk) - #service_name, deployment_name, deployment_slot, label, role_name, system_config, os_virtual_hard_disk - print "Ret ", ret + hard_disk = OSVirtualHardDisk( + 'b39f27a8b8c64d52b05eac6a62ebad85__Ubuntu_DAILY_BUILD-' + + 'precise-12_04_2-LTS-amd64-server-20130702-en-us-30GB', + media_link, disk_label='opdemandservice') + ret = sms.create_virtual_machine_deployment( + 'opdemandservice', 'deploy1', 'production', 'opdemandservice2', + 'opdemandservice3', config, hard_disk) + # service_name, deployment_name, deployment_slot, label, role_name + # system_config, os_virtual_hard_disk + print 'Ret ', ret return sms + @task(name='azuresms.terminate_node') def terminate_node(node_id, creds, params, provider_id): pass + @task(name='azuresms.converge_node') def converge_node(node_id, ssh_username, fqdn, ssh_private_key, command='sudo chef-client'): @@ -52,7 +62,7 @@ def prepare_run_kwargs(params, init): 'kernel_id': None, 'ramdisk_id': None, 'monitoring_enabled': False, 'subnet_id': None, 'block_device_map': None, - } + } # convert zone "any" to NoneType requested_zone = params.get('zone') if requested_zone and requested_zone.lower() == 'any': @@ -64,7 +74,7 @@ def prepare_run_kwargs(params, init): 'security_groups': params['security_groups'], 'placement': requested_zone, 'kernel_id': params.get('kernel', None), - } + } # update user_data cloud_config = '#cloud-config\n'+yaml.safe_dump(init) kwargs.update({'user_data': cloud_config}) @@ -77,8 +87,7 @@ def format_metadata(boto): return { 'architecture': boto.architecture, 'block_device_mapping': 
{ - k: v.volume_id for k, v in boto.block_device_mapping.items() - }, + k: v.volume_id for k, v in boto.block_device_mapping.items()}, 'client_token': boto.client_token, 'dns_name': boto.dns_name, 'ebs_optimized': boto.ebs_optimized, @@ -121,6 +130,5 @@ def format_metadata(boto): if __name__ == "__main__": - print "Checking " + print 'Checking ' l = launch_node(None, None, None, None, None, None) - diff --git a/celerytasks/chef.py b/celerytasks/chef.py index a728330d7b..8af07227c7 100644 --- a/celerytasks/chef.py +++ b/celerytasks/chef.py @@ -11,9 +11,9 @@ import httplib import json import re +import urlparse from chef_rsa import Key -import urlparse def ruby_b64encode(value): @@ -25,9 +25,27 @@ def ruby_b64encode(value): yield b64[i:i+60] +class UTC(datetime.tzinfo): + """UTC timezone stub.""" + + ZERO = datetime.timedelta(0) + + def utcoffset(self, dt): + return self.ZERO + + def tzname(self, dt): + return 'UTC' + + def dst(self, dt): + return self.ZERO + + +utc = UTC() + + def canonical_time(timestamp): if timestamp.tzinfo is not None: - timestamp = timestamp.astimezone(utc).replace(tzinfo=None) # @UndefinedVariable + timestamp = timestamp.astimezone(utc).replace(tzinfo=None) return timestamp.replace(microsecond=0).isoformat() + 'Z' @@ -47,12 +65,14 @@ def canonical_request(http_method, path, hashed_body, timestamp, user_id): path = canonical_path(path) if isinstance(timestamp, datetime.datetime): timestamp = canonical_time(timestamp) - hashed_path = sha1_base64(path) # @UnusedVariable - return ('Method:%(http_method)s\n' - 'Hashed Path:%(hashed_path)s\n' - 'X-Ops-Content-Hash:%(hashed_body)s\n' - 'X-Ops-Timestamp:%(timestamp)s\n' - 'X-Ops-UserId:%(user_id)s' % vars()) + hashed_path = sha1_base64(path) + return """\ +Method:{} +Hashed Path:{} +X-Ops-Content-Hash:{} +X-Ops-Timestamp:{} +X-Ops-UserId:{}""".format(http_method, hashed_path, hashed_body, timestamp, + user_id) def sha1_base64(value): @@ -69,21 +89,21 @@ def create_authorization(blank_headers, verb, url, priv_key, user, body=''): b64_priv = ruby_b64encode(rsa_key.private_encrypt(canon)) for i, line in enumerate(b64_priv): - headers["X-Ops-Authorization-" + str(i + 1)] = line + headers['X-Ops-Authorization-' + str(i + 1)] = line - headers["X-Ops-Timestamp"] = timestamp - headers["X-Ops-Content-Hash"] = hashed_body - headers["X-Ops-UserId"] = user + headers['X-Ops-Timestamp'] = timestamp + headers['X-Ops-Content-Hash'] = hashed_body + headers['X-Ops-UserId'] = user return headers class ChefAPI(object): - + headers = { - "Accept": "application/json", - "X-Chef-Version": "11.0.4.x", - "X-Ops-Sign": "version=1.0", - "Content-Type": "application/json" + 'Accept': 'application/json', + 'X-Chef-Version': '11.0.4.x', + 'X-Ops-Sign': 'version=1.0', + 'Content-Type': 'application/json' } def __init__(self, server_url, client_name, client_key): @@ -92,56 +112,57 @@ def __init__(self, server_url, client_name, client_key): self.client_key = client_key self.hostname = urlparse.urlsplit(self.server_url).netloc self.path = urlparse.urlsplit(self.server_url).path - self.headers.update({"Host": self.hostname}) + self.headers.update({'Host': self.hostname}) self.conn = httplib.HTTPSConnection(self.hostname) self.conn.connect() def request(self, verb, path, body=''): url = self.path + path - headers = create_authorization(self.headers, verb, url, self.client_key, self.client_name, body) + headers = create_authorization( + self.headers, verb, url, self.client_key, self.client_name, body) self.conn.request(verb, url, body=body, headers=headers) resp 
= self.conn.getresponse() return resp.read(), resp.status def create_databag(self, name): - body = json.dumps({"name": name, "id" : name}) - resp = self.request("POST", "/data" , body) + body = json.dumps({'name': name, 'id': name}) + resp = self.request('POST', '/data', body) return resp - + def create_databag_item(self, name, item_name, item_value): - item_dict = {"id" : item_name } + item_dict = {'id': item_name} item_dict.update(item_value) - body = json.dumps( item_dict ) - resp = self.request("POST", "/data/%s" % name, body) + body = json.dumps(item_dict) + resp = self.request('POST', '/data/%s' % name, body) return resp def get_databag(self, bag_name): - return self.request("GET", "/data/%s" % bag_name) + return self.request('GET', '/data/%s' % bag_name) def delete_databag(self, bag_name): - return self.request("DELETE", "/data/%s" % bag_name) + return self.request('DELETE', '/data/%s' % bag_name) def delete_databag_item(self, bag_name, item_name): - return self.request("DELETE", "/data/%s/%s" % (bag_name, item_name)) - + return self.request('DELETE', '/data/%s/%s' % (bag_name, item_name)) + def update_databag_item(self, bag_name, item_name, item_value): body = json.dumps(item_value) - return self.request("PUT", "/data/%s/%s" % ( bag_name, item_name ), body) + return self.request('PUT', '/data/%s/%s' % (bag_name, item_name), body) def get_databag_item(self, bag_name, item_name): - return self.request("GET", "/data/%s/%s" % ( bag_name, item_name )) + return self.request('GET', '/data/%s/%s' % (bag_name, item_name)) def get_all_cookbooks(self): - return self.request("GET", "/cookbooks" ) + return self.request('GET', '/cookbooks') def get_node(self, node_id): - return self.request("GET", "/nodes/%s" % node_id) - + return self.request('GET', '/nodes/%s' % node_id) + def delete_node(self, node_id): - return self.request("DELETE", "/nodes/%s" % node_id) - + return self.request('DELETE', '/nodes/%s' % node_id) + def delete_client(self, client_id): - return self.request("DELETE", "/clients/%s" % client_id) + return self.request('DELETE', '/clients/%s' % client_id) # def create_cookbook(self, cookbook_name, cookbooks, priv_key, user, org): # checksums = {} @@ -154,30 +175,39 @@ def delete_client(self, client_id): # hasher.update(json_cb) # check = hasher.hexdigest() # checksums[check] = None -# by_cb[c["name"]] = check -# body = json.dumps({"checksums": checksums}) -# sandbox = json.loads(self.request("POST", "/sandboxes")) -# print "Sandbox is ", sandbox -# for k, v in sandbox["checksums"].items(): -# print "URL ", v -# if "url" in v: -# print "Trigger it ", self.request("PUT", v["url"][25:], json_cb, priv_key, user) -# -# print "Mark as uploaded ", self.request("PUT", sandbox["uri"][25:], """{"is_completed":true}""", priv_key, user) -# print "Mark as uploaded ", self.request("PUT", sandbox["uri"][25:], """{"is_completed":true}""", priv_key, user) -# print "Mark as uploaded ", self.request("PUT", sandbox["uri"][25:], """{"is_completed":true}""", priv_key, user) -# print "Mark as uploaded ", self.request("PUT", sandbox["uri"][25:], """{"is_completed":true}""", priv_key, user) -# +# by_cb[c['name']] = check +# body = json.dumps({'checksums': checksums}) +# sandbox = json.loads(self.request('POST', '/sandboxes')) +# print 'Sandbox is ', sandbox +# for k, v in sandbox['checksums'].items(): +# print 'URL ', v +# if 'url' in v: +# print 'Trigger it ', self.request( +# 'PUT', v['url'][25:], json_cb, priv_key, user) +# +# print 'Mark as uploaded ', self.request( +# 'PUT', sandbox['uri'][25:], 
'''{'is_completed':true}''', priv_key, +# user) +# print 'Mark as uploaded ', self.request( +# 'PUT', sandbox['uri'][25:], '''{'is_completed':true}''', priv_key, +# user) +# print 'Mark as uploaded ', self.request( +# 'PUT', sandbox['uri'][25:], '''{'is_completed':true}''', priv_key, +# user) +# print 'Mark as uploaded ', self.request( +# 'PUT', sandbox['uri'][25:], '''{'is_completed':true}''', priv_key, +# user) +# # for c in cookbooks: -# c["definitions"] = [{ -# "name": "unicorn_config.rb", -# "checksum": by_cb[c["name"]], -# "path": "definitions/unicorn_config.rb", -# "specificity": "default" +# c['definitions'] = [{ +# 'name': 'unicorn_config.rb', +# 'checksum': by_cb[c['name']], +# 'path': 'definitions/unicorn_config.rb', +# 'specificity': 'default' # }], -# return self.request("PUT", "/organizations/%s/cookbooks/%s/1" % +# return self.request('PUT', '/organizations/%s/cookbooks/%s/1' % # (org, cookbook_name), body, priv_key, user) -# +# # @task(name='chef.update_data_bag_item') # def update_data_bag_item(conn_info, bag_name, item_name, item_value): # client = ChefAPI(conn_info['server_url'], @@ -185,4 +215,3 @@ def delete_client(self, client_id): # conn_info['client_key'], # conn_info['organization']) # client.update_databag_item(bag_name, item_name, item_value) - diff --git a/celerytasks/chef_mock.py b/celerytasks/chef_mock.py index 55beb8c500..61e19abfc5 100644 --- a/celerytasks/chef_mock.py +++ b/celerytasks/chef_mock.py @@ -1,5 +1,4 @@ """ - https://github.com/coderanger/pychef We want a simpler version for making API calls @@ -10,38 +9,39 @@ class ChefAPI(object): def __init__(self, chef_url, client_name, client_key): - self.server_url = server_url + self.server_url = chef_url self.client_name = client_name self.client_key = client_key def request(self, verb, path, body=''): - assert verb in ("GET", "DELETE", "PUT", "POST") - assert path + assert verb in ('GET', 'DELETE', 'PUT', 'POST') + assert path assert body + def create_databag(self, name): - body = json.dumps({"name": name, "id" : name}) - resp = self.request("POST", "/data" , body) + body = json.dumps({'name': name, 'id': name}) + resp = self.request('POST', '/data', body) return resp def create_databag_item(self, name, item_name, item_value): - item_dict = {"id" : item_name } + item_dict = {'id': item_name} item_dict.update(item_value) - body = json.dumps( item_dict ) - resp = self.request("POST", "/data/%s" % name, body) + body = json.dumps(item_dict) + resp = self.request('POST', "/data/%s" % name, body) return resp def get_databag(self, bag_name): - return self.request("GET", "/data/%s" % bag_name) + return self.request('GET', "/data/%s" % bag_name) def delete_databag(self, bag_name): - return self.request("DELETE", "/data/%s" % bag_name) + return self.request('DELETE', "/data/%s" % bag_name) def update_databag_item(self, bag_name, item_name, item_value): body = json.dumps(item_value) - return self.request("PUT", "/data/%s/%s" % ( bag_name, item_name ), body) + return self.request('PUT', "/data/%s/%s" % (bag_name, item_name), body) def get_databag_item(self, bag_name, item_name): - return self.request("GET", "/data/%s/%s" % ( bag_name, item_name )) + return self.request('GET', "/data/%s/%s" % (bag_name, item_name)) def get_all_cookbooks(self): - return self.request("GET", "/cookbooks" ) + return self.request('GET', '/cookbooks') diff --git a/celerytasks/chef_rsa.py b/celerytasks/chef_rsa.py index 5bed60199d..37de7b2ad4 100644 --- a/celerytasks/chef_rsa.py +++ b/celerytasks/chef_rsa.py @@ -2,10 +2,20 @@ This file as 
copied from pyChef at https://github.com/coderanger/pychef """ +from ctypes import CDLL +from ctypes import byref +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_long +from ctypes import c_size_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import create_string_buffer +from ctypes import string_at import sys -from ctypes import * -if sys.platform == 'win32' or sys.platform == 'cygwin': + +if sys.platform in ('win32', 'cygwin'): _eay = CDLL('libeay32.dll') elif sys.platform == 'darwin': _eay = CDLL('libcrypto.dylib') diff --git a/celerytasks/controller.py b/celerytasks/controller.py index 76b84c226b..c92baedef9 100644 --- a/celerytasks/controller.py +++ b/celerytasks/controller.py @@ -32,13 +32,15 @@ def update_formation(formation_id, databag_item_value): if code == 200: return resp, code elif code == 404: - resp, code = client.create_databag_item('deis-formations', - formation_id, databag_item_value) + resp, code = client.create_databag_item( + 'deis-formations', formation_id, databag_item_value) if code != 201: - raise RuntimeError('Failed to create data bag: {code} => {resp}'.format(**locals())) + msg = 'Failed to create data bag: {code} => {resp}' + raise RuntimeError(msg.format(**locals())) else: - raise RuntimeError('Failed to update data bag: {code} => {resp}'.format(**locals())) - + msg = 'Failed to update data bag: {code} => {resp}' + raise RuntimeError(msg.format(**locals())) + @task(name='controller.destroy_formation') def destroy_formation(formation_id): diff --git a/celerytasks/tests/__init__.py b/celerytasks/tests/__init__.py index 61b855183e..d85db08382 100644 --- a/celerytasks/tests/__init__.py +++ b/celerytasks/tests/__init__.py @@ -1,2 +1,3 @@ -from chef import * -from azuretest import * + +from azuretest import * # noqa +from chef import * # noqa diff --git a/celerytasks/tests/azuretest.py b/celerytasks/tests/azuretest.py index 72508fd192..1adb147749 100644 --- a/celerytasks/tests/azuretest.py +++ b/celerytasks/tests/azuretest.py @@ -1,17 +1,15 @@ from __future__ import unicode_literals -import json import unittest from celerytasks import azuresms -from deis import settings +# from deis import settings class AzureTest(unittest.TestCase): """Tests the client interface to Chef Server API.""" + @unittest.skip('Windows Azure is not yet supported.') def test_launch(self): - - #l = azuresms.launch_node(None, None, None, None, None, None) - + l = azuresms.launch_node(None, None, None, None, None, None) print "L is ", l diff --git a/celerytasks/tests/chef.py b/celerytasks/tests/chef.py index df05aed9d5..e7be28b530 100644 --- a/celerytasks/tests/chef.py +++ b/celerytasks/tests/chef.py @@ -13,6 +13,7 @@ from deis import settings +@unittest.skip('Need to set up TEST_CHEF_SERVER somehow.') class ChefAPITest(unittest.TestCase): """Tests the client interface to Chef Server API.""" @@ -22,32 +23,32 @@ def setUp(self): settings.TEST_CHEF_CLIENT_KEY) def test_databag(self): - dbag_name = 'testing' + databag_name = 'testing' ditem_name = 'item1' ditem_value = {'something': 1, 'else': 2} # delete the databag to make sure we are creating a new one - resp, status = self.client.delete_databag(dbag_name) + resp, status = self.client.delete_databag(databag_name) - resp, status = self.client.create_databag(dbag_name) + resp, status = self.client.create_databag(databag_name) self.assertEqual(status, 201) self.assertTrue(resp) resp = self.client.create_databag_item( - dbag_name, ditem_name, ditem_value) + databag_name, ditem_name, ditem_value) 
self.assertEqual(status, 201) self.assertTrue(resp) - resp, status = self.client.get_databag(dbag_name) + resp, status = self.client.get_databag(databag_name) self.assertEqual(status, 200) - resp, status = self.client.get_databag_item(dbag_name, ditem_name) + resp, status = self.client.get_databag_item(databag_name, ditem_name) self.assertEqual(status, 200) ditem_value = json.loads(resp) ditem_value['newvalue'] = 'databag' resp, status = self.client.update_databag_item( - dbag_name, ditem_name, ditem_value) + databag_name, ditem_name, ditem_value) self.assertEqual(status, 200) - resp, status = self.client.get_databag_item(dbag_name, ditem_name) + resp, status = self.client.get_databag_item(databag_name, ditem_name) self.assertEqual(status, 200) self.assertTrue('newvalue' in json.loads(resp)) diff --git a/celerytasks/util.py b/celerytasks/util.py index 896156a79e..be65e03686 100644 --- a/celerytasks/util.py +++ b/celerytasks/util.py @@ -1,9 +1,11 @@ + import StringIO -import paramiko import select import socket import time +import paramiko + def connect_ssh(username, hostname, port, key): key_f = StringIO.StringIO(key) diff --git a/client/Makefile b/client/Makefile index 871e44ab4a..5efa1c5dd3 100644 --- a/client/Makefile +++ b/client/Makefile @@ -1,7 +1,12 @@ -# Run "pip install pep8" to enable PEP8 code style checking with "make pep8" -pep8: - pep8 deis -# Run "pip install pyflakes" to enable pyflakes checking with "make pyflakes" -pyflakes: - pyflakes deis +# Install deis client locally through pip. +install: + pip install deis + +# Remove the installed copy of the deis client through pip. +uninstall: + pip uninstall -y deis + +# Clean up after setup.py droppings. +clean: + rm -rf build/ dist/ *.egg-info diff --git a/client/deis/client.py b/client/deis/client.py index 972fbd032a..e1e55aa0a4 100755 --- a/client/deis/client.py +++ b/client/deis/client.py @@ -295,14 +295,19 @@ def auth_register(self, args): password = getpass('password: ') email = args.get('--email') if not email: +<<<<<<< HEAD email = raw_input ('email: ') url = urlparse.urljoin(controller, '/api/auth/register') +======= + email = raw_input('email: ') + url = urlparse.urljoin(controller, '/api/register') +>>>>>>> master payload = {'username': username, 'password': password, 'email': email} response = self._session.post(url, data=payload, allow_redirects=False) if response.status_code == requests.codes.created: # @UndefinedVariable self._settings['controller'] = controller self._settings.save() - print('Registered {}'.format(username)) + print("Registered {}".format(username)) login_args = {'--username': username, '--password': password, '': controller} # login after registering @@ -335,7 +340,7 @@ def auth_login(self, args): if response.status_code == requests.codes.found: # @UndefinedVariable self._settings['controller'] = controller self._settings.save() - print('Logged in as {}'.format(username)) + print("Logged in as {}".format(username)) return True else: print('Login failed') @@ -354,6 +359,22 @@ def auth_logout(self, args): self._settings.save() print('Logged out') +<<<<<<< HEAD +======= + def backends_list(self, args): + formation = args.get('--formation') + if not formation: + formation = self._session.formation + response = self._dispatch('get', "/formations/{}/backends".format(formation)) + if response.status_code == requests.codes.ok: # @UndefinedVariable + print('=== {0}'.format(formation)) + data = response.json() + for item in data['results']: + print('{0[uuid]:<23} {0[created]}'.format(item)) + else: + 
print('Error!', response.text) + +>>>>>>> master def builds_create(self, args): formation = args.get('--formation') if not formation: @@ -407,8 +428,8 @@ def config_set(self, args): if not formation: formation = self._session.formation body = {'values': json.dumps(dictify(args['=']))} - response = self._dispatch('post', - '/formations/{}/config'.format(formation), + response = self._dispatch('post', + '/formations/{}/config'.format(formation), json.dumps(body)) if response.status_code == requests.codes.created: # @UndefinedVariable config = response.json() @@ -475,8 +496,31 @@ def containers_scale(self, args): '/api/formations/{}/scale/containers'.format(formation), json.dumps(body)) if response.status_code == requests.codes.ok: # @UndefinedVariable +<<<<<<< HEAD databag = json.loads(response.content) print(json.dumps(databag, indent=2)) +======= + data = response.json() + if data['count'] == 0: + print 'No keys found' + return + print('=== {0} Keys'.format(data['results'][0]['owner'])) + for key in data['results']: + public = key['public'] + print('{0} {1}...{2}'.format( + key['id'], public[0:16], public[-10:])) + else: + print('Error!', response.text) + + def keys_remove(self, args): + """Remove a specific SSH key for the logged in user.""" + key = args.get('') + sys.stdout.write('Removing {0} SSH Key... '.format(key)) + sys.stdout.flush() + response = self._dispatch('delete', '/keys/{}'.format(key)) + if response.status_code == requests.codes.no_content: # @UndefinedVariable + print('done') +>>>>>>> master else: print('Error!', response.text) @@ -603,8 +647,18 @@ def formations_calculate(self, args): formation = args.get('--formation') if not formation: formation = self._session.formation +<<<<<<< HEAD response = self._dispatch('post', '/api/formations/{}/calculate'.format(formation)) +======= + body = {} + for type_num in args.get(''): + typ, count = type_num.split('=') + body.update({typ: int(count)}) + response = self._dispatch('post', + '/api/formations/{}/scale'.format(formation), + json.dumps(body)) +>>>>>>> master if response.status_code == requests.codes.ok: # @UndefinedVariable databag = json.loads(response.content) print(json.dumps(databag, indent=2)) @@ -615,8 +669,13 @@ def formations_balance(self, args): formation = args.get('--formation') if not formation: formation = self._session.formation +<<<<<<< HEAD response = self._dispatch('post', '/api/formations/{}/balance'.format(formation)) +======= + response = self._dispatch('post', + '/api/formations/{}/calculate'.format(formation)) +>>>>>>> master if response.status_code == requests.codes.ok: # @UndefinedVariable databag = json.loads(response.content) print(json.dumps(databag, indent=2)) @@ -627,8 +686,13 @@ def formations_converge(self, args): formation = args.get('--formation') if not formation: formation = self._session.formation +<<<<<<< HEAD response = self._dispatch('post', '/api/formations/{}/converge'.format(formation)) +======= + response = self._dispatch('post', + '/api/formations/{}/balance'.format(formation)) +>>>>>>> master if response.status_code == requests.codes.ok: # @UndefinedVariable databag = json.loads(response.content) print(json.dumps(databag, indent=2)) @@ -720,6 +784,7 @@ def layers_destroy(self, args): formation = args.get('--formation') if not formation: formation = self._session.formation +<<<<<<< HEAD layer = args[''] sys.stdout.write('Destroying layer {layer}...'.format(**locals())) sys.stdout.flush() @@ -757,6 +822,10 @@ def layers_scale(self, args): response = self._dispatch('post', 
'/api/formations/{}/scale/layers'.format(formation), json.dumps(body)) +======= + response = self._dispatch('post', + '/api/formations/{}/converge'.format(formation)) +>>>>>>> master if response.status_code == requests.codes.ok: # @UndefinedVariable databag = json.loads(response.content) print(json.dumps(databag, indent=2)) @@ -777,7 +846,7 @@ def nodes_list(self, args): formation = args.get('--formation') if not formation: formation = self._session.formation - response = self._dispatch('get', + response = self._dispatch('get', '/api/formations/{}/nodes'.format(formation)) if response.status_code == requests.codes.ok: # @UndefinedVariable print('=== {0}'.format(formation)) diff --git a/client/deis/docopt.py b/client/deis/docopt.py deleted file mode 100644 index 6bec33b9ea..0000000000 --- a/client/deis/docopt.py +++ /dev/null @@ -1,581 +0,0 @@ -"""Pythonic command-line interface parser that will make you smile. - - * http://docopt.org - * Repository and issue-tracker: https://github.com/docopt/docopt - * Licensed under terms of MIT license (see LICENSE-MIT) - * Copyright (c) 2013 Vladimir Keleshev, vladimir@keleshev.com - -""" -import sys -import re - - -__all__ = ['docopt'] -__version__ = '0.6.1' - - -class DocoptLanguageError(Exception): - - """Error in construction of usage-message by developer.""" - - -class DocoptExit(SystemExit): - - """Exit in case user invoked program with incorrect arguments.""" - - usage = '' - - def __init__(self, message=''): - SystemExit.__init__(self, (message + '\n' + self.usage).strip()) - - -class Pattern(object): - - def __eq__(self, other): - return repr(self) == repr(other) - - def __hash__(self): - return hash(repr(self)) - - def fix(self): - self.fix_identities() - self.fix_repeating_arguments() - return self - - def fix_identities(self, uniq=None): - """Make pattern-tree tips point to same object if they are equal.""" - if not hasattr(self, 'children'): - return self - uniq = list(set(self.flat())) if uniq is None else uniq - for i, child in enumerate(self.children): - if not hasattr(child, 'children'): - assert child in uniq - self.children[i] = uniq[uniq.index(child)] - else: - child.fix_identities(uniq) - - def fix_repeating_arguments(self): - """Fix elements that should accumulate/increment values.""" - either = [list(child.children) for child in transform(self).children] - for case in either: - for e in [child for child in case if case.count(child) > 1]: - if type(e) is Argument or type(e) is Option and e.argcount: - if e.value is None: - e.value = [] - elif type(e.value) is not list: - e.value = e.value.split() - if type(e) is Command or type(e) is Option and e.argcount == 0: - e.value = 0 - return self - - -def transform(pattern): - """Expand pattern into an (almost) equivalent one, but with single Either. - - Example: ((-a | -b) (-c | -d)) => (-a -c | -a -d | -b -c | -b -d) - Quirks: [-a] => (-a), (-a...) 
=> (-a -a) - - """ - result = [] - groups = [[pattern]] - while groups: - children = groups.pop(0) - parents = [Required, Optional, OptionsShortcut, Either, OneOrMore] - if any(t in map(type, children) for t in parents): - child = [c for c in children if type(c) in parents][0] - children.remove(child) - if type(child) is Either: - for c in child.children: - groups.append([c] + children) - elif type(child) is OneOrMore: - groups.append(child.children * 2 + children) - else: - groups.append(child.children + children) - else: - result.append(children) - return Either(*[Required(*e) for e in result]) - - -class LeafPattern(Pattern): - - """Leaf/terminal node of a pattern tree.""" - - def __init__(self, name, value=None): - self.name, self.value = name, value - - def __repr__(self): - return '%s(%r, %r)' % (self.__class__.__name__, self.name, self.value) - - def flat(self, *types): - return [self] if not types or type(self) in types else [] - - def match(self, left, collected=None): - collected = [] if collected is None else collected - pos, match = self.single_match(left) - if match is None: - return False, left, collected - left_ = left[:pos] + left[pos + 1:] - same_name = [a for a in collected if a.name == self.name] - if type(self.value) in (int, list): - if type(self.value) is int: - increment = 1 - else: - increment = ([match.value] if type(match.value) is str - else match.value) - if not same_name: - match.value = increment - return True, left_, collected + [match] - same_name[0].value += increment - return True, left_, collected - return True, left_, collected + [match] - - -class BranchPattern(Pattern): - - """Branch/inner node of a pattern tree.""" - - def __init__(self, *children): - self.children = list(children) - - def __repr__(self): - return '%s(%s)' % (self.__class__.__name__, - ', '.join(repr(a) for a in self.children)) - - def flat(self, *types): - if type(self) in types: - return [self] - return sum([child.flat(*types) for child in self.children], []) - - -class Argument(LeafPattern): - - def single_match(self, left): - for n, pattern in enumerate(left): - if type(pattern) is Argument: - return n, Argument(self.name, pattern.value) - return None, None - - @classmethod - def parse(class_, source): - name = re.findall('(<\S*?>)', source)[0] - value = re.findall('\[default: (.*)\]', source, flags=re.I) - return class_(name, value[0] if value else None) - - -class Command(Argument): - - def __init__(self, name, value=False): - self.name, self.value = name, value - - def single_match(self, left): - for n, pattern in enumerate(left): - if type(pattern) is Argument: - if pattern.value == self.name: - return n, Command(self.name, True) - else: - break - return None, None - - -class Option(LeafPattern): - - def __init__(self, short=None, long=None, argcount=0, value=False): - assert argcount in (0, 1) - self.short, self.long, self.argcount = short, long, argcount - self.value = None if value is False and argcount else value - - @classmethod - def parse(class_, option_description): - short, long, argcount, value = None, None, 0, False - options, _, description = option_description.strip().partition(' ') - options = options.replace(',', ' ').replace('=', ' ') - for s in options.split(): - if s.startswith('--'): - long = s - elif s.startswith('-'): - short = s - else: - argcount = 1 - if argcount: - matched = re.findall('\[default: (.*)\]', description, flags=re.I) - value = matched[0] if matched else None - return class_(short, long, argcount, value) - - def single_match(self, left): - 
for n, pattern in enumerate(left): - if self.name == pattern.name: - return n, pattern - return None, None - - @property - def name(self): - return self.long or self.short - - def __repr__(self): - return 'Option(%r, %r, %r, %r)' % (self.short, self.long, - self.argcount, self.value) - - -class Required(BranchPattern): - - def match(self, left, collected=None): - collected = [] if collected is None else collected - l = left - c = collected - for pattern in self.children: - matched, l, c = pattern.match(l, c) - if not matched: - return False, left, collected - return True, l, c - - -class Optional(BranchPattern): - - def match(self, left, collected=None): - collected = [] if collected is None else collected - for pattern in self.children: - m, left, collected = pattern.match(left, collected) - return True, left, collected - - -class OptionsShortcut(Optional): - - """Marker/placeholder for [options] shortcut.""" - - -class OneOrMore(BranchPattern): - - def match(self, left, collected=None): - assert len(self.children) == 1 - collected = [] if collected is None else collected - l = left - c = collected - l_ = None - matched = True - times = 0 - while matched: - # could it be that something didn't match but changed l or c? - matched, l, c = self.children[0].match(l, c) - times += 1 if matched else 0 - if l_ == l: - break - l_ = l - if times >= 1: - return True, l, c - return False, left, collected - - -class Either(BranchPattern): - - def match(self, left, collected=None): - collected = [] if collected is None else collected - outcomes = [] - for pattern in self.children: - matched, _, _ = outcome = pattern.match(left, collected) - if matched: - outcomes.append(outcome) - if outcomes: - return min(outcomes, key=lambda outcome: len(outcome[1])) - return False, left, collected - - -class Tokens(list): - - def __init__(self, source, error=DocoptExit): - self += source.split() if hasattr(source, 'split') else source - self.error = error - - @staticmethod - def from_pattern(source): - source = re.sub(r'([\[\]\(\)\|]|\.\.\.)', r' \1 ', source) - source = [s for s in re.split('\s+|(\S*<.*?>)', source) if s] - return Tokens(source, error=DocoptLanguageError) - - def move(self): - return self.pop(0) if len(self) else None - - def current(self): - return self[0] if len(self) else None - - -def parse_long(tokens, options): - """long ::= '--' chars [ ( ' ' | '=' ) chars ] ;""" - long, eq, value = tokens.move().partition('=') - assert long.startswith('--') - value = None if eq == value == '' else value - similar = [o for o in options if o.long == long] - if tokens.error is DocoptExit and similar == []: # if no exact match - similar = [o for o in options if o.long and o.long.startswith(long)] - if len(similar) > 1: # might be simply specified ambiguously 2+ times? - raise tokens.error('%s is not a unique prefix: %s?' 
% - (long, ', '.join(o.long for o in similar))) - elif len(similar) < 1: - argcount = 1 if eq == '=' else 0 - o = Option(None, long, argcount) - options.append(o) - if tokens.error is DocoptExit: - o = Option(None, long, argcount, value if argcount else True) - else: - o = Option(similar[0].short, similar[0].long, - similar[0].argcount, similar[0].value) - if o.argcount == 0: - if value is not None: - raise tokens.error('%s must not have an argument' % o.long) - else: - if value is None: - if tokens.current() in [None, '--']: - raise tokens.error('%s requires argument' % o.long) - value = tokens.move() - if tokens.error is DocoptExit: - o.value = value if value is not None else True - return [o] - - -def parse_shorts(tokens, options): - """shorts ::= '-' ( chars )* [ [ ' ' ] chars ] ;""" - token = tokens.move() - assert token.startswith('-') and not token.startswith('--') - left = token.lstrip('-') - parsed = [] - while left != '': - short, left = '-' + left[0], left[1:] - similar = [o for o in options if o.short == short] - if len(similar) > 1: - raise tokens.error('%s is specified ambiguously %d times' % - (short, len(similar))) - elif len(similar) < 1: - o = Option(short, None, 0) - options.append(o) - if tokens.error is DocoptExit: - o = Option(short, None, 0, True) - else: # why copying is necessary here? - o = Option(short, similar[0].long, - similar[0].argcount, similar[0].value) - value = None - if o.argcount != 0: - if left == '': - if tokens.current() in [None, '--']: - raise tokens.error('%s requires argument' % short) - value = tokens.move() - else: - value = left - left = '' - if tokens.error is DocoptExit: - o.value = value if value is not None else True - parsed.append(o) - return parsed - - -def parse_pattern(source, options): - tokens = Tokens.from_pattern(source) - result = parse_expr(tokens, options) - if tokens.current() is not None: - raise tokens.error('unexpected ending: %r' % ' '.join(tokens)) - return Required(*result) - - -def parse_expr(tokens, options): - """expr ::= seq ( '|' seq )* ;""" - seq = parse_seq(tokens, options) - if tokens.current() != '|': - return seq - result = [Required(*seq)] if len(seq) > 1 else seq - while tokens.current() == '|': - tokens.move() - seq = parse_seq(tokens, options) - result += [Required(*seq)] if len(seq) > 1 else seq - return [Either(*result)] if len(result) > 1 else result - - -def parse_seq(tokens, options): - """seq ::= ( atom [ '...' 
] )* ;""" - result = [] - while tokens.current() not in [None, ']', ')', '|']: - atom = parse_atom(tokens, options) - if tokens.current() == '...': - atom = [OneOrMore(*atom)] - tokens.move() - result += atom - return result - - -def parse_atom(tokens, options): - """atom ::= '(' expr ')' | '[' expr ']' | 'options' - | long | shorts | argument | command ; - """ - token = tokens.current() - result = [] - if token in '([': - tokens.move() - matching, pattern = {'(': [')', Required], '[': [']', Optional]}[token] - result = pattern(*parse_expr(tokens, options)) - if tokens.move() != matching: - raise tokens.error("unmatched '%s'" % token) - return [result] - elif token == 'options': - tokens.move() - return [OptionsShortcut()] - elif token.startswith('--') and token != '--': - return parse_long(tokens, options) - elif token.startswith('-') and token not in ('-', '--'): - return parse_shorts(tokens, options) - elif token.startswith('<') and token.endswith('>') or token.isupper(): - return [Argument(tokens.move())] - else: - return [Command(tokens.move())] - - -def parse_argv(tokens, options, options_first=False): - """Parse command-line argument vector. - - If options_first: - argv ::= [ long | shorts ]* [ argument ]* [ '--' [ argument ]* ] ; - else: - argv ::= [ long | shorts | argument ]* [ '--' [ argument ]* ] ; - - """ - parsed = [] - while tokens.current() is not None: - if tokens.current() == '--': - return parsed + [Argument(None, v) for v in tokens] - elif tokens.current().startswith('--'): - parsed += parse_long(tokens, options) - elif tokens.current().startswith('-') and tokens.current() != '-': - parsed += parse_shorts(tokens, options) - elif options_first: - return parsed + [Argument(None, v) for v in tokens] - else: - parsed.append(Argument(None, tokens.move())) - return parsed - - -def parse_defaults(doc): - defaults = [] - for s in parse_section('options:', doc): - # FIXME corner case "bla: options: --foo" - _, _, s = s.partition(':') # get rid of "options:" - split = re.split('\n *(-\S+?)', '\n' + s)[1:] - split = [s1 + s2 for s1, s2 in zip(split[::2], split[1::2])] - options = [Option.parse(s) for s in split if s.startswith('-')] - defaults += options - return defaults - - -def parse_section(name, source): - pattern = re.compile('^([^\n]*' + name + '[^\n]*\n?(?:[ \t].*?(?:\n|$))*)', - re.IGNORECASE | re.MULTILINE) - return [s.strip() for s in pattern.findall(source)] - - -def formal_usage(section): - _, _, section = section.partition(':') # drop "usage:" - pu = section.split() - return '( ' + ' '.join(') | (' if s == pu[0] else s for s in pu[1:]) + ' )' - - -def extras(help, version, options, doc): - if help and any((o.name in ('-h', '--help')) and o.value for o in options): - print(doc.strip("\n")) - sys.exit() - if version and any(o.name == '--version' and o.value for o in options): - print(version) - sys.exit() - - -class Dict(dict): - def __repr__(self): - return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items())) - - -def docopt(doc, argv=None, help=True, version=None, options_first=False): - """Parse `argv` based on command-line interface described in `doc`. - - `docopt` creates your command-line interface based on its - description that you pass as `doc`. Such description can contain - --options, , commands, which could be - [optional], (required), (mutually | exclusive) or repeated... - - Parameters - ---------- - doc : str - Description of your command-line interface. - argv : list of str, optional - Argument vector to be parsed. 
sys.argv[1:] is used if not - provided. - help : bool (default: True) - Set to False to disable automatic help on -h or --help - options. - version : any object - If passed, the object will be printed if --version is in - `argv`. - options_first : bool (default: False) - Set to True to require options precede positional arguments, - i.e. to forbid options and positional arguments intermix. - - Returns - ------- - args : dict - A dictionary, where keys are names of command-line elements - such as e.g. "--verbose" and "<path>", and values are the - parsed values of those elements. - - Example - ------- - >>> from docopt import docopt - >>> doc = ''' - ... Usage: - ... my_program tcp <host> <port> [--timeout=<seconds>] - ... my_program serial <port> [--baud=<n>] [--timeout=<seconds>] - ... my_program (-h | --help | --version) - ... - ... Options: - ... -h, --help Show this screen and exit. - ... --baud=<n> Baudrate [default: 9600] - ... ''' - >>> argv = ['tcp', '127.0.0.1', '80', '--timeout', '30'] - >>> docopt(doc, argv) - {'--baud': '9600', - '--help': False, - '--timeout': '30', - '--version': False, - '<host>': '127.0.0.1', - '<port>': '80', - 'serial': False, - 'tcp': True} - - See also - -------- - * For video introduction see http://docopt.org - * Full documentation is available in README.rst as well as online - at https://github.com/docopt/docopt#readme - - """ - argv = sys.argv[1:] if argv is None else argv - - usage_sections = parse_section('usage:', doc) - if len(usage_sections) == 0: - raise DocoptLanguageError('"usage:" (case-insensitive) not found.') - if len(usage_sections) > 1: - raise DocoptLanguageError('More than one "usage:" (case-insensitive).') - DocoptExit.usage = usage_sections[0] - - options = parse_defaults(doc) - pattern = parse_pattern(formal_usage(DocoptExit.usage), options) - # [default] syntax for argument is disabled - #for a in pattern.flat(Argument): - # same_name = [d for d in arguments if d.name == a.name] - # if same_name: - # a.value = same_name[0].value - argv = parse_argv(Tokens(argv), list(options), options_first) - pattern_options = set(pattern.flat(Option)) - for options_shortcut in pattern.flat(OptionsShortcut): - doc_options = parse_defaults(doc) - options_shortcut.children = list(set(doc_options) - pattern_options) - #if any_options: - # options_shortcut.children += [Option(o.short, o.long, o.argcount) - # for o in argv if type(o) is Option] - extras(help, version, argv, doc) - matched, left, collected = pattern.fix().match(argv) - if matched and left == []: # better error message if left?
- return Dict((a.name, a.value) for a in (pattern.flat() + collected)) - raise DocoptExit() diff --git a/client/requirements.txt b/client/requirements.txt index 0261b93555..f834a0bb39 100644 --- a/client/requirements.txt +++ b/client/requirements.txt @@ -1,2 +1,3 @@ +docopt>=0.6.1 PyYAML>=3.10 requests>=1.2.3 diff --git a/client/setup.py b/client/setup.py index 6a53a33a24..76e5d5ec64 100755 --- a/client/setup.py +++ b/client/setup.py @@ -24,7 +24,7 @@ KWARGS = {} if USE_SETUPTOOLS: KWARGS = { - 'install_requires': ['PyYAML', 'requests'], + 'install_requires': ['docopt', 'PyYAML', 'requests'], 'entry_points': {'console_scripts': ['deis = deis.client:main']}, } else: @@ -41,24 +41,24 @@ url='https://github.com/opdemand/deis', keywords=['opdemand', 'deis', 'cloud', 'aws', 'ec2', 'heroku', 'docker'], classifiers=[ - 'Development Status :: 4 - Beta', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'Intended Audience :: Information Technology', - 'Intended Audience :: System Administrators', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2.7', - 'Topic :: Internet', - 'Topic :: System :: Systems Administration', - ], + 'Development Status :: 4 - Beta', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'Intended Audience :: Information Technology', + 'Intended Audience :: System Administrators', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2.7', + 'Topic :: Internet', + 'Topic :: System :: Systems Administration', + ], packages=['deis'], data_files=[ - ('.', ['README.rst']), - ], + ('.', ['README.rst']), + ], scripts=['deis/deis'], long_description=LONG_DESCRIPTION, - requires=['PyYAML', 'requests'], + requires=['docopt', 'PyYAML', 'requests'], zip_safe=True, **KWARGS) diff --git a/deis/settings.py b/deis/settings.py index 9e2cb69789..1ba49707ba 100644 --- a/deis/settings.py +++ b/deis/settings.py @@ -250,9 +250,9 @@ # EMAIL_HOST_PASSWORD = 'bar' # import other settings -from .chef_settings import * # @UnusedWildImport -from .celery_settings import * # @UnusedWildImport +from .chef_settings import * # noqa @UnusedWildImport +from .celery_settings import * # noqa @UnusedWildImport try: - from .local_settings import * # pylint: disable=W0401 @UnusedWildImport + from .local_settings import * # noqa @UnusedWildImport except ImportError: - pass + print('No deis/local_settings.py file found!') diff --git a/docs/Makefile b/docs/Makefile index ed95ae0f91..c1f6a882ac 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -22,6 +22,11 @@ I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext +docs: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The documentation pages are now in $(BUILDDIR)/html." 
+ help: @echo "Please use \`make <target>' where <target> is one of" # @echo " html to make standalone HTML files" @@ -45,12 +50,7 @@ help: # @echo " doctest to run all doctests embedded in the documentation (if enabled)" @echo " docs to build the docs and copy the static files to the outputdir" @echo " server to serve the docs in your browser under \`http://localhost:8000\`" - @echo " publish to publish the app to dotcloud" - -docs: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The documentation pages are now in $(BUILDDIR)/html." + @echo " publish to publish the app to deis.io" publish: @echo "Coming soon..." diff --git a/docs/source/conf.py b/docs/source/conf.py index 4f17b4a3c8..4d691b1091 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -11,7 +11,8 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys, os +import os +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the @@ -32,7 +33,8 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage'] # , 'sphinx.ext.viewcode'] +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage'] +# , 'sphinx.ext.viewcode'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -188,22 +190,22 @@ # -- Options for LaTeX output -------------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} + # Additional stuff for the LaTeX preamble. + #'preamble': '', + } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'Deis.tex', u'Deis Documentation', - u'OpDemand, LLC', 'manual'), -] + ('index', 'Deis.tex', u'Deis Documentation', + u'OpDemand, LLC', 'manual'), + ] # The name of an image file (relative to this directory) to place at the top of # the title page. @@ -233,7 +235,7 @@ man_pages = [ ('index', 'deis', u'Deis Documentation', [u'OpDemand, LLC'], 1) -] + ] # If true, show URL addresses after external links. #man_show_urls = False @@ -245,10 +247,10 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'Deis', u'Deis Documentation', - u'OpDemand, LLC', 'Deis', 'One line description of project.', - 'Miscellaneous'), -] + ('index', 'Deis', u'Deis Documentation', + u'OpDemand, LLC', 'Deis', 'One line description of project.', + 'Miscellaneous'), + ] # Documents to append as an appendix to all manuals.
#texinfo_appendices = [] diff --git a/gconfig.py b/gconfig.py index fd6bc554fe..6005ebb5ba 100644 --- a/gconfig.py +++ b/gconfig.py @@ -1,5 +1,6 @@ import multiprocessing -bind="0.0.0.0:5000" + +bind = '0.0.0.0:5000' +django_settings = 'deis.settings' workers = multiprocessing.cpu_count() * 2 + 1 -django_settings = "deis.settings" diff --git a/requirements.txt b/requirements.txt index 38868e6776..173d403599 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +# Deis' python library requirements azure>=0.7.0 boto>=2.9.8 Django>=1.5.1 @@ -6,12 +7,16 @@ django-celery>=3.0.17 django-json-field>=0.5.4 django-yamlfield>=0.4 djangorestframework>=2.3.6 -docutils>=0.11 gevent==0.13.8 gunicorn>=0.17.5 -PIL>=1.1.7 paramiko>=1.10.1 psycopg2>=2.5.1 pyCrypto>=2.6 PyYAML>=3.10 South>=0.8.1 + +# Generates template documentation in the Django admin. +# docutils>=0.11 + +# Deis developers must run "flake8" before committing code. +# flake8>=2.0 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000000..2ffe7c17f5 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,4 @@ +[flake8] +max-line-length = 100 +exclude = */api/migrations/*,*/build/*,*/venv/* +max-complexity = 12
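
Editor's note on the docopt change: this diff removes the vendored client/deis/docopt.py and instead pins docopt>=0.6.1 in client/requirements.txt and client/setup.py. As a minimal, hypothetical sketch (not Deis code) of how a docopt-based entry point consumes that library, following the usage documented in the removed module's own docstring; the module name "example" and its options are made up for illustration:

"""Minimal docopt sketch (hypothetical example, not part of the Deis client).

Usage:
  example tcp <host> <port> [--timeout=<seconds>]
  example (-h | --help | --version)

Options:
  -h --help            Show this screen and exit.
  --timeout=<seconds>  Connection timeout [default: 30].
"""
from docopt import docopt  # provided by the docopt>=0.6.1 requirement


if __name__ == '__main__':
    # Parses sys.argv[1:] against the docstring above and returns a dict
    # keyed by option and argument names, e.g. args['<host>'].
    args = docopt(__doc__, version='example 0.1')
    print(args)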