Skip to content

Commit

Permalink
Merge pull request #5219 from dimagi/fab-ansible
Browse files Browse the repository at this point in the history
Make it easy to deploy based on an ansible inventory file
  • Loading branch information
TylerSheffels committed Jan 7, 2015
2 parents ffae6ce + 7e7aa13 commit 3d76210
Show file tree
Hide file tree
Showing 2 changed files with 74 additions and 23 deletions.
12 changes: 8 additions & 4 deletions corehq/apps/style/management/commands/update_manifest.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,25 +22,29 @@ class Command(LabelCommand):

root_dir = settings.FILEPATH

@property
def manifest_file(self):
    """Full path of the compress manifest file under the project root."""
    path = os.path.join(self.root_dir, MANIFEST_FILE)
    return path

def output_manifest(self, manifest_str, is_soft_update=False):
print "saving manifest.json to disk"
if not os.path.exists(CACHE_DIR):
os.makedirs(CACHE_DIR)
if is_soft_update:
with open(os.path.join(self.root_dir, MANIFEST_FILE), 'r') as fin:
if is_soft_update and os.path.exists(self.manifest_file):
with open(self.manifest_file, 'r') as fin:
print "soft update of manifest.json"
existing_manifest = fin.read()
new_manifest_dict = json.loads(manifest_str)
existing_manifest_dict = json.loads(existing_manifest)
existing_manifest_dict.update(new_manifest_dict)
manifest_str = json.dumps(existing_manifest_dict)
with open(os.path.join(self.root_dir, MANIFEST_FILE), 'w') as fout:
with open(self.manifest_file, 'w') as fout:
print manifest_str
fout.write(manifest_str)

def save_manifest(self):
print "saving manifest.json to redis"
with open(os.path.join(self.root_dir, MANIFEST_FILE), 'r') as fin:
with open(self.manifest_file, 'r') as fin:
manifest_data = fin.read()
print manifest_data
rcache.set(COMPRESS_PREFIX % self.current_sha, manifest_data, 86400)
Expand Down
85 changes: 66 additions & 19 deletions fabfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -436,37 +436,81 @@ def preview():
_setup_path()


def read_inventory_file(filename):
    """
    Parse an ansible inventory file.

    filename -- path to an ansible inventory file.

    Returns a dict mapping group names ("webworker", "proxy", etc.)
    to lists of host names (ip addresses).
    """
    from ansible.inventory import InventoryParser

    parser = InventoryParser(filename)
    groups = {}
    for group_name, group in parser.groups.items():
        groups[group_name] = [host.name for host in group.get_hosts()]
    return groups


@task
def development():
    """
    Deploy to a development environment described by an ansible inventory.

    Must pass in the 'inventory' env variable, which is the path to an
    ansible inventory file.

    Example command:

        fab development awesome_deploy \
        --set inventory=/path/to/commcarehq-ansible/ansible/inventories/development

    NOTE(review): the source here was a scraped diff with removed and
    added lines interleaved (duplicate roledef keys, a missing comma,
    duplicated env.roles); this is the reconstructed post-commit
    version — confirm against the repository.
    """
    env.sudo_user = 'cchq'
    env.django_bind = '0.0.0.0'
    env.django_port = '9010'
    env.should_migrate = True

    require('inventory')

    # use inventory filename as environment name
    # i.e. if the inventory is called my-crazy-setup
    # then things on the server will be stored in
    # /home/cchq/www/my-crazy-setup/code_root, etc.
    env.environment = os.path.basename(env.inventory)
    servers = read_inventory_file(env.inventory)

    _setup_path()

    # Required inventory groups — a KeyError here means the inventory
    # file is missing a group this deploy expects.
    webworkers = servers['webworkers']
    postgresql = servers['postgresql']
    couchdb = servers['couchdb']
    redis = servers['redis']
    memcached = servers['memcached']
    # if no server specified, just don't run pillowtop
    pillowtop = servers.get('pillowtop', [])

    # hard-coded proxy host for the staticfiles role
    proxy = ['10.210.101.189']

    env.roledefs = {
        'couch': couchdb,
        'pg': postgresql,
        'rabbitmq': postgresql,
        'django_celery': postgresql,
        'sms_queue': postgresql,
        'reminder_queue': postgresql,
        'pillow_retry_queue': postgresql,
        'django_app': webworkers,
        'django_pillowtop': pillowtop,
        'formsplayer': postgresql,
        'staticfiles': proxy,
        'lb': [],
        'deploy': postgresql,

        'django_monolith': [],
    }
    env.roles = ['deploy']
    env.es_endpoint = 'localhost'
    env.flower_port = 5555
    env.hosts = env.roledefs['deploy']


@task
@roles(ROLES_ALL_SRC)
Expand Down Expand Up @@ -1179,9 +1223,12 @@ def set_celery_supervisorconf():

@roles(ROLES_PILLOWTOP)
def set_pillowtop_supervisorconf():
    """Regenerate the pillowtop supervisor config on pillowtop hosts."""
    # in reality this also should be another machine
    # if the number of listeners gets too high
    #
    # Don't run for preview,
    # and also don't run if there are no hosts for the 'django_pillowtop' role.
    # If there are no matching roles, it's still run once
    # on the 'deploy' machine, db!
    # So you need to explicitly test to see if all_hosts is empty.
    if env.environment not in ['preview'] and env.all_hosts:
        # preview environment should not run pillowtop and index stuff
        # just rely on what's on staging
        _rebuild_supervisor_conf_file('make_supervisor_pillowtop_conf', 'supervisor_pillowtop.conf')
Expand Down

0 comments on commit 3d76210

Please sign in to comment.