
randnum internal sweep, randnum mytardis examples added. hrmc and vasp examples refactored
iiman committed Mar 11, 2014
1 parent 1ee4a9f commit 7fcdc96e125f0a5c5f08ab8c8fa94b35fef02c39
Showing with 702 additions and 650 deletions.
  1. +1 −3 chiminey/corestages/converge.py
  2. +1 −3 chiminey/corestages/execute.py
  3. +66 −7 chiminey/corestages/parent.py
  4. +2 −2 chiminey/corestages/strategies/cloudschedulestrategy.py
  5. +2 −2 chiminey/corestages/strategies/clusterschedulestrategy.py
  6. +4 −36 chiminey/examples/hrmc2/hrmcparent.py
  7. +0 −34 chiminey/{smartconnectorscheduler/management/commands/hrmcinitial.py → examples/hrmc2/initialise.py}
  8. +1 −1 chiminey/examples/randnumcloud/initialise.py
  9. +16 −0 chiminey/examples/randnumcloud/payload_randnum/Makefile
  10. +7 −0 chiminey/examples/randnumcloud/payload_randnum/bootstrap_done.sh
  11. +15 −0 chiminey/examples/randnumcloud/payload_randnum/process_payload/Makefile
  12. +3 −0 chiminey/examples/randnumcloud/payload_randnum/process_payload/process_running_done.sh
  13. +3 −0 chiminey/examples/randnumcloud/payload_randnum/process_payload/process_schedule_done.sh
  14. +1 −0 chiminey/examples/randnumcloud/payload_randnum/process_payload/start_process_schedule.sh
  15. +4 −0 chiminey/examples/randnumcloud/payload_randnum/process_payload/start_running_process.sh
  16. +27 −0 chiminey/examples/randnumcloud/payload_randnum/schedule_done.sh
  17. +6 −0 chiminey/examples/randnumcloud/payload_randnum/start_bootstrap.sh
  18. +13 −0 chiminey/examples/randnumcloud/payload_randnum/start_schedule.sh
  19. +59 −0 chiminey/examples/randnuminternalsweep/initialise.py
  20. +16 −0 chiminey/examples/randnuminternalsweep/payload_randnum/Makefile
  21. +7 −0 chiminey/examples/randnuminternalsweep/payload_randnum/bootstrap_done.sh
  22. +15 −0 chiminey/examples/randnuminternalsweep/payload_randnum/process_payload/Makefile
  23. +3 −0 chiminey/examples/randnuminternalsweep/payload_randnum/process_payload/process_running_done.sh
  24. +3 −0 chiminey/examples/randnuminternalsweep/payload_randnum/process_payload/process_schedule_done.sh
  25. +1 −0 chiminey/examples/randnuminternalsweep/payload_randnum/process_payload/start_process_schedule.sh
  26. +4 −0 chiminey/examples/randnuminternalsweep/payload_randnum/process_payload/start_running_process.sh
  27. +27 −0 chiminey/examples/randnuminternalsweep/payload_randnum/schedule_done.sh
  28. +6 −0 chiminey/examples/randnuminternalsweep/payload_randnum/start_bootstrap.sh
  29. +13 −0 chiminey/examples/randnuminternalsweep/payload_randnum/start_schedule.sh
  30. +2 −8 chiminey/examples/{randomnumbers2/rand2parent.py → randnuminternalsweep/randparent.py}
  31. +78 −0 chiminey/examples/randnummytardis/initialise.py
  32. +16 −0 chiminey/examples/randnummytardis/payload_randnum/Makefile
  33. +7 −0 chiminey/examples/randnummytardis/payload_randnum/bootstrap_done.sh
  34. +15 −0 chiminey/examples/randnummytardis/payload_randnum/process_payload/Makefile
  35. +3 −0 chiminey/examples/randnummytardis/payload_randnum/process_payload/process_running_done.sh
  36. +3 −0 chiminey/examples/randnummytardis/payload_randnum/process_payload/process_schedule_done.sh
  37. +1 −0 chiminey/examples/randnummytardis/payload_randnum/process_payload/start_process_schedule.sh
  38. +4 −0 chiminey/examples/randnummytardis/payload_randnum/process_payload/start_running_process.sh
  39. +27 −0 chiminey/examples/randnummytardis/payload_randnum/schedule_done.sh
  40. +6 −0 chiminey/examples/randnummytardis/payload_randnum/start_bootstrap.sh
  41. +13 −0 chiminey/examples/randnummytardis/payload_randnum/start_schedule.sh
  42. +1 −1 chiminey/examples/{randomnumbers2/rand2configure.py → randnummytardis/randconfigure.py}
  43. +1 −1 chiminey/examples/{randomnumbers2/rand2transform.py → randnummytardis/randtransform.py}
  44. +2 −4 chiminey/examples/randnumunix/initialise.py
  45. +0 −1 chiminey/examples/randomnumbers/__init__.py
  46. +0 −1 chiminey/examples/randomnumbers2/__init__.py
  47. +0 −32 chiminey/{smartconnectorscheduler/management/commands/vaspinitial.py → examples/vasp/initialise.py}
  48. +2 −4 chiminey/initialisation/coreinitial.py
  49. +27 −11 ...ney/{examples/randomnumbers/randexecute.py → smartconnectorscheduler/management/commands/hrmc.py}
  50. +1 −495 chiminey/smartconnectorscheduler/management/commands/initial.py
  51. +1 −1 chiminey/smartconnectorscheduler/management/commands/randnumcloud.py
  52. +55 −0 chiminey/smartconnectorscheduler/management/commands/randnuminternalsweep.py
  53. +55 −0 chiminey/smartconnectorscheduler/management/commands/randnummytardis.py
  54. +52 −0 chiminey/smartconnectorscheduler/management/commands/vasp.py
  55. +2 −0 payload_randomnumber/process_payload/Makefile
  56. +2 −3 payload_randomnumber/process_payload/start_running_process.sh
@@ -1,5 +1,5 @@
- # Copyright (C) 2012, RMIT University
+ # Copyright (C) 2014, RMIT University
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
@@ -85,8 +85,6 @@ def retrieve_local_settings(run_settings, local_settings):
# # '%s/stages/create/cloud_sleep_interval' % RMIT_SCHEMA,
# # '%s/stages/create/created_nodes' % RMIT_SCHEMA,
# '%s/system/max_seed_int' % RMIT_SCHEMA,
# '%s/stages/run/compile_file' % RMIT_SCHEMA,
# '%s/stages/run/retry_attempts' % RMIT_SCHEMA,
# '%s/input/system/cloud/number_vm_instances' % RMIT_SCHEMA,
# '%s/input/hrmc/iseed' % RMIT_SCHEMA,
# '%s/input/hrmc/optimisation_scheme' % RMIT_SCHEMA,
@@ -405,7 +405,7 @@ def _upload_input_dir_variations(self, processes, local_settings,
# get run Map
parent_stage = self.import_parent_stage(run_settings)
- run_map, self.rand_index = parent_stage.get_run_map(local_settings,
+ run_map, self.rand_index = parent_stage.get_internal_sweep_map(local_settings,
run_settings=run_settings)
# load value_map
@@ -554,8 +554,6 @@ def set_execute_settings(self, run_settings, local_settings):
'%s/stages/setup/payload_destination' % self.SCHEMA_PREFIX,
'%s/stages/setup/filename_for_PIDs' % self.SCHEMA_PREFIX,
'%s/stages/run/process_output_dirname' % self.SCHEMA_PREFIX,
'%s/stages/run/compile_file' % self.SCHEMA_PREFIX,
'%s/stages/run/retry_attempts' % self.SCHEMA_PREFIX,
'%s/system/contextid' % self.SCHEMA_PREFIX,
'%s/system/random_numbers' % self.SCHEMA_PREFIX,
'%s/system/id' % self.SCHEMA_PREFIX
@@ -20,13 +20,20 @@
import logging
import os
from itertools import product
from chiminey.runsettings import getval, SettingNotFoundException
from chiminey.storage import get_url_with_credentials, list_dirs
from chiminey.corestages.stage import Stage
logger = logging.getLogger(__name__)
class Parent(Stage):
SCHEMA_PREFIX = "http://rmit.edu.au/schemas"
"""
A list of corestages
"""
@@ -80,15 +87,67 @@ def output(self, run_settings):
run_settings[u'http://rmit.edu.au/schemas/stages/parallel/testing'][u'index'] = self.parallel_index
return run_settings
- def get_run_map(self, settings, **kwargs):
+ def get_internal_sweep_map(self, settings, **kwargs):
rand_index = 42
map = {'val': [1]}
logger.debug('map=%s' % map)
return map, rand_index
def get_total_templates(self, maps, **kwargs):
return 1
def get_total_procs_per_iteration(self, maps, **kwargs):
try:
run_settings = kwargs['run_settings']
input_exists = self.input_exists(run_settings)
except KeyError:
input_exists = False
if input_exists:
return self._get_procs_from_input_dirs(maps, **kwargs)
else:
return self._get_procs_from_map_variations(maps)
def _get_procs_from_input_dirs(self, maps, **kwargs):
run_settings = kwargs['run_settings']
output_storage_settings = kwargs['output_storage_settings']
job_dir = kwargs['job_dir']
try:
id = getval(run_settings, '%s/system/id' % self.SCHEMA_PREFIX)
except SettingNotFoundException as e:
logger.error(e)
id = 0
iter_inputdir = os.path.join(job_dir, "input_%s" % id)
url_with_pkey = get_url_with_credentials(
output_storage_settings,
'%s://%s@%s' % (output_storage_settings['scheme'],
output_storage_settings['type'],
iter_inputdir),
is_relative_path=False)
logger.debug(url_with_pkey)
input_dirs = list_dirs(url_with_pkey)
for iter, template_map in enumerate(maps):
logger.debug("template_map=%s" % template_map)
map_keys = template_map.keys()
logger.debug("map_keys %s" % map_keys)
map_ranges = [list(template_map[x]) for x in map_keys]
product = 1
for i in map_ranges:
product = product * len(i)
total_procs = product * len(input_dirs)
logger.debug("total_procs=%d" % (total_procs))
return total_procs
def _get_procs_from_map_variations(self, maps):
contexts = []
num_variations = 0
for run_map in maps:
logger.debug("run_map=%s" % run_map)
map_keys = run_map.keys()
map_ranges = [list(run_map[x]) for x in map_keys]
logger.debug("map_ranges=%s" % map_ranges)
for z in product(*map_ranges):
context = {}
for i, k in enumerate(map_keys):
context[k] = str(z[i]) # str() so that 0 doesn't default value
contexts.append(context)
num_variations += 1
logger.debug("num_variations=%s" % num_variations)
return num_variations
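The default Parent.get_total_procs_per_iteration above just counts Cartesian-product combinations of the sweep-map values. A minimal standalone sketch of that counting, using a hypothetical run map (the keys and values below are illustrative, not Chiminey settings):

```python
from itertools import product

# Hypothetical sweep map: each key maps to the list of values it sweeps over.
run_map = {'iseed': [42, 43], 'threshold': [0.1, 0.2, 0.3]}

map_keys = list(run_map.keys())
map_ranges = [list(run_map[k]) for k in map_keys]

# One context (and hence one process) per combination of values.
contexts = [dict(zip(map_keys, [str(v) for v in combo]))
            for combo in product(*map_ranges)]

num_variations = len(contexts)
print(num_variations)  # 2 * 3 = 6 processes for this map
```

When input directories exist, _get_procs_from_input_dirs above multiplies the same per-map product by the number of input directories (total_procs = product * len(input_dirs)); that is the only difference between the two branches.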
@@ -151,7 +151,7 @@ def complete_schedule(schedule_class, local_settings):
def start_schedule(schedule_class, run_settings, local_settings):
parent_stage = schedule_class.import_parent_stage(run_settings)
- map = parent_stage.get_run_map(local_settings, run_settings=run_settings)
+ map = parent_stage.get_internal_sweep_map(local_settings, run_settings=run_settings)
try:
isinstance(map, tuple)
run_map = map[0]
@@ -162,7 +162,7 @@ def start_schedule(schedule_class, run_settings, local_settings):
run_settings, 'http://rmit.edu.au/schemas/platform/storage/output')
offset = getval(run_settings, '%s/platform/storage/output/offset' % RMIT_SCHEMA)
job_dir = get_job_dir(output_storage_settings, offset)
- schedule_class.total_processes = parent_stage.get_total_templates(
+ schedule_class.total_processes = parent_stage.get_total_procs_per_iteration(
[run_map], run_settings=run_settings,
output_storage_settings=output_storage_settings, job_dir=job_dir)
logger.debug('total_processes=%d' % schedule_class.total_processes)
@@ -143,7 +143,7 @@ def complete_schedule(schedule_class, local_settings):
def start_schedule(schedule_class, run_settings, local_settings):
parent_stage = schedule_class.import_parent_stage(run_settings)
- map = parent_stage.get_run_map(local_settings, run_settings=run_settings)
+ map = parent_stage.get_internal_sweep_map(local_settings, run_settings=run_settings)
try:
isinstance(map, tuple)
run_map = map[0]
@@ -154,7 +154,7 @@ def start_schedule(schedule_class, run_settings, local_settings):
run_settings, 'http://rmit.edu.au/schemas/platform/storage/output')
offset = getval(run_settings, '%s/platform/storage/output/offset' % RMIT_SCHEMA)
job_dir = get_job_dir(output_storage_settings, offset)
- schedule_class.total_processes = parent_stage.get_total_templates(
+ schedule_class.total_processes = parent_stage.get_total_procs_per_iteration(
[run_map], run_settings=run_settings,
output_storage_settings=output_storage_settings, job_dir=job_dir)
logger.debug('total_processes=%d' % schedule_class.total_processes)
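Both strategies treat the return value of get_internal_sweep_map as a (map, rand_index) tuple, as the Parent default above returns, and the surrounding try/except (partly outside this hunk) appears to guard against implementations that return a bare map. A small helper that makes the same unpacking explicit, offered only as a sketch:

```python
def unpack_sweep_map(result):
    """Return (run_map, rand_index) whether or not a rand_index was supplied."""
    if isinstance(result, tuple):
        run_map, rand_index = result
    else:
        run_map, rand_index = result, None
    return run_map, rand_index

# e.g.
# run_map, _ = unpack_sweep_map(
#     parent_stage.get_internal_sweep_map(local_settings, run_settings=run_settings))
```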
@@ -50,41 +50,9 @@ def is_triggered(self, context):
def __unicode__(self):
return u"HRMCParallelStage"
def input_valid(self, settings_to_test):
logger.debug('settings_to_test=%s' % settings_to_test)
try:
sweep = getvals(settings_to_test, '%s/input/sweep' % self.SCHEMA_PREFIX)
input_platform_offset = settings_to_test[self.SCHEMA_PREFIX + '/platform/storage/input']['offset']
except SettingNotFoundException:
try:
input_location = getval(settings_to_test, '%s/input/system/input_location' % self.SCHEMA_PREFIX)
except SettingNotFoundException:
input_location = getval(settings_to_test, '%s/input/location/input/input_location' % self.SCHEMA_PREFIX)
input_platform_name, input_platform_offset = self.break_bdp_url(input_location)
settings_to_test[self.SCHEMA_PREFIX + '/platform/storage/input'] = {}
settings_to_test[self.SCHEMA_PREFIX + '/platform/storage/input'][
'platform_url'] = input_platform_name
settings_to_test[self.SCHEMA_PREFIX + '/platform/storage/input']['offset'] = input_platform_offset
input_settings = self.get_platform_settings(settings_to_test, '%s/platform/storage/input' % self.SCHEMA_PREFIX)
logger.debug('input-settings=%s' % input_settings)
input_url = "%s://%s@%s/%s/initial" % (
input_settings['scheme'], input_settings['type'],
input_settings['host'], input_platform_offset)
logger.debug('input_url=%s' % input_url)
input_url_cred = get_url_with_credentials(input_settings, input_url, is_relative_path=False)
expected_input_files = ['input_bo.dat', 'input_gr.dat', 'input_initial.xyz', 'input_sq.dat', "HRMC.inp"]
provided_input_files = get_basename(list_all_files(input_url_cred))
for fp in expected_input_files:
fp_template = "%s_template" % fp
if fp not in provided_input_files and fp_template not in provided_input_files:
logger.debug('expected file %s' % fp)
return (False, 'Expected HRMC input files under initial/ not found. Expected %s; Provided %s'
% (expected_input_files, provided_input_files))
return (True, 'valid_input')
# ["%s_template" % x for x in provided_input_files]:
- def get_run_map(self, settings, **kwargs):
+ def get_internal_sweep_map(self, settings, **kwargs):
local_settings = settings.copy()
run_settings = kwargs['run_settings']
logger.debug('run_settings=%s' % run_settings)
@@ -192,7 +160,7 @@ def get_run_map(self, settings, **kwargs):
return map, rand_index
# #fixme: consider moving to parent class. do we need input dirs to calculate?
- # def get_total_templates(self, maps, **kwargs):
+ # def get_total_procs_per_iteration(self, maps, **kwargs):
# run_settings = kwargs['run_settings']
# output_storage_settings = kwargs['output_storage_settings']
# job_dir = kwargs['job_dir']
@@ -223,7 +191,7 @@ def get_run_map(self, settings, **kwargs):
# logger.debug("total_templates=%d" % (total_templates))
# return total_templates
- def get_total_templates(self, maps, **kwargs):
+ def get_total_procs_per_iteration(self, maps, **kwargs):
run_settings = kwargs['run_settings']
output_storage_settings = kwargs['output_storage_settings']
job_dir = kwargs['job_dir']
@@ -260,7 +228,7 @@ def get_total_templates(self, maps, **kwargs):
logger.debug("total_templates=%d" % (total_templates))
return total_templates
'''
- def get_total_templates(self, maps, **kwargs):
+ def get_total_procs_per_iteration(self, maps, **kwargs):
logger.debug("maps=%s" % maps)
contexts = []
num_variations = 0
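A connector that genuinely sweeps a parameter internally overrides get_internal_sweep_map to return a multi-valued map, as HRMCParallelStage now does for its own settings (its body is mostly elided in this hunk). A minimal sketch of such an override; the parameter names and ranges are hypothetical, and Parent is assumed importable from chiminey/corestages/parent.py:

```python
import random

from chiminey.corestages.parent import Parent  # assumed import path


class MySweepParent(Parent):
    def get_internal_sweep_map(self, settings, **kwargs):
        # Seed consumed later when per-process input files are templated.
        rand_index = random.randint(1, 10000)
        # 2 x 2 values -> 4 processes per iteration
        # (counted by get_total_procs_per_iteration).
        sweep_map = {'threshold': [0.1, 0.2], 'fanout': [4, 8]}
        return sweep_map, rand_index
```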
@@ -19,38 +19,11 @@
# IN THE SOFTWARE.
import logging
from django.core.management.base import BaseCommand
from chiminey.smartconnectorscheduler import models
from chiminey.initialisation import CoreInitial
logger = logging.getLogger(__name__)
MESSAGE = "This will add a new directive to the catalogue of available connectors. Are you sure [Yes/No]?"
class Command(BaseCommand):
"""
Load up the initial state of the database (replaces use of
fixtures). Assumes specific structure.
"""
args = ''
help = 'Setup an initial task structure.'
def setup(self):
confirm = raw_input(MESSAGE)
if confirm != "Yes":
print "action aborted by user"
return
directive = HRMCInitial()
directive.define_directive('hrmc', description='HRMC Smart Connector', sweep=True)
print "done"
def handle(self, *args, **options):
self.setup()
print "done"
class HRMCInitial(CoreInitial):
def define_parent_stage(self):
@@ -104,8 +77,6 @@ def define_execute_stage(self):
u'http://rmit.edu.au/schemas/stages/run':
{
u'process_output_dirname': 'HRMC2',
u'compile_file': 'HRMC',
u'retry_attempts': 3,
},
})
return execute_stage
@@ -162,8 +133,3 @@ def define_sweep_stage(self, subdirective):
},
})
return sweep_stage
def assemble_stages(self):
self.define_transform_stage()
self.define_converge_stage()
return super(HRMCInitial, self).assemble_stages()
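With the Command class stripped out of initialise.py, registering the connector presumably falls to the new, much thinner chiminey/smartconnectorscheduler/management/commands/hrmc.py, whose contents are not shown here. A guess at its shape, based purely on the code removed above; treat it as illustrative only:

```python
import logging

from django.core.management.base import BaseCommand
from chiminey.examples.hrmc2.initialise import HRMCInitial

logger = logging.getLogger(__name__)

MESSAGE = "This will add a new directive to the catalogue of available connectors. Are you sure [Yes/No]?"


class Command(BaseCommand):
    help = 'Registers the hrmc smart connector.'

    def handle(self, *args, **options):
        # Mirrors the confirmation flow of the removed hrmcinitial command.
        if raw_input(MESSAGE) != "Yes":
            print "action aborted by user"
            return
        HRMCInitial().define_directive('hrmc', description='HRMC Smart Connector', sweep=True)
        print "done"
```

If that guess is right, `python manage.py hrmc` registers the directive the same way the old hrmcinitial command did.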
@@ -31,7 +31,7 @@ def define_bootstrap_stage(self):
u'http://rmit.edu.au/schemas/stages/setup':
{
u'payload_source': 'local/payload_randnum',
- u'payload_destination': 'randnum_cloud_dest',
+ u'payload_destination': 'randnum_dest',
u'payload_name': 'process_payload',
u'filename_for_PIDs': 'PIDs_collections',
},
@@ -0,0 +1,16 @@
PAYLOAD_NAME='process_payload'
IDS='proc_ids'
start_bootstrap:
@echo bootstrap starting
@sh ./start_bootstrap.sh >& bootstrap.output &
bootstrap_done:
@sh ./bootstrap_done.sh
start_schedule:
@sh ./start_schedule.sh $(PAYLOAD_NAME) $(IDS)
schedule_done:
@sh ./schedule_done.sh $(IDS)
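The payload Makefile is the contract between Chiminey and the compute node: the fixed targets above (start_bootstrap, bootstrap_done, start_schedule, schedule_done) are presumably what the core stages invoke, with PAYLOAD_NAME and IDS passed as make variables. Purely as an illustration (Chiminey drives these targets on the remote node; the transport layer is not part of this diff), the local equivalent would be:

```python
import subprocess


def run_payload_target(payload_dir, target, **variables):
    """Run `make <target> VAR=value ...` inside an unpacked payload directory."""
    cmd = ['make', target] + ['%s=%s' % (k, v) for k, v in variables.items()]
    return subprocess.check_output(cmd, cwd=payload_dir)


# e.g. kick off scheduling for the processes listed in proc_ids:
# run_payload_target('/tmp/randnum_dest/payload_randnum', 'start_schedule',
#                    PAYLOAD_NAME='process_payload', IDS='proc_ids')
```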
@@ -0,0 +1,7 @@
#!/bin/sh
# version 2.0
#command -v f95 >/dev/null 2>&1 || { echo >&2 "f95 not installed Aborting."; exit 1; }
echo Environment Setup Completed
@@ -0,0 +1,15 @@
start_process_schedule:
@sh ./start_process_schedule.sh
process_schedule_done:
@sh ./process_schedule_done.sh
start_running_process:
@echo creating chiminey
@mkdir -p chiminey
@echo running
@sh ./start_running_process.sh
process_running_done:
@echo checking for running
@sh ./process_running_done.sh
@@ -0,0 +1,3 @@
#!/bin/sh
echo Process Setup Completed
@@ -0,0 +1,4 @@
#!/bin/sh
python -c 'import random; print random.random()' >& chiminey/output
python -c 'import random; print random.random()' >> chiminey/output
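The two lines above are the whole "computation" of the randnum payload: write two random floats into chiminey/output (note that `>&` redirection is a bash/csh feature rather than POSIX sh). For reference, the same thing in plain Python:

```python
# Equivalent of start_running_process.sh: write two random floats into
# chiminey/output (the Makefile's start_running_process target creates
# the chiminey/ directory beforehand).
import os
import random

if not os.path.isdir('chiminey'):
    os.mkdir('chiminey')
with open(os.path.join('chiminey', 'output'), 'w') as out:
    out.write('%s\n%s\n' % (random.random(), random.random()))
```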
@@ -0,0 +1,27 @@
#!/bin/sh
IDS=$1
completed=0
completed_procs=""
procs=0
while read line
do
cd $line
procs=`expr $procs + 1 `
msg=`make process_schedule_done IDS=$IDS`
if [[ "$msg" == *"Process Setup Completed"* ]];
then
completed=`expr $completed + 1 `
completed_procs=`echo $completed_procs " $line,"`
fi
cd ..
done < $IDS
if [ $completed == $procs ];
then
echo "All processes are scheduled"
else
echo "$completed of $procs processes scheduled"
echo $completed_procs
fi
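schedule_done.sh walks the directories listed in the IDS file, reruns `make process_schedule_done` in each, and reports how many echoed the completion string (the `[[ ... ]]` pattern match is a bash/ksh feature, so the `#!/bin/sh` shebang effectively assumes sh is bash on the node). A rough Python rendering of the same counting loop, useful for checking the logic locally; the paths are hypothetical:

```python
import subprocess


def count_scheduled(ids_file):
    """Count how many process directories report 'Process Setup Completed'."""
    procs, completed, completed_procs = 0, 0, []
    with open(ids_file) as f:
        for line in f:
            proc_dir = line.strip()
            if not proc_dir:
                continue
            procs += 1
            msg = subprocess.check_output(
                ['make', 'process_schedule_done', 'IDS=%s' % ids_file],
                cwd=proc_dir).decode()
            if 'Process Setup Completed' in msg:
                completed += 1
                completed_procs.append(proc_dir)
    if completed == procs:
        print("All processes are scheduled")
    else:
        print("%d of %d processes scheduled" % (completed, procs))
        print(", ".join(completed_procs))


# count_scheduled('proc_ids')
```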
@@ -0,0 +1,6 @@
#!/bin/sh
# version 2.0
# Specify packages that are needed to run your program
# If your program is going to run on Centos VM and your program requires dos2unix,
# yum -y install dos2unix
# NB: Notice the '-y' flag.