-
Notifications
You must be signed in to change notification settings - Fork 0
/
launchers_pbs.py
108 lines (76 loc) · 3.59 KB
/
launchers_pbs.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
#!/usr/bin/env python
# encoding: utf-8
"""
launchers_pbs.py
Created by Loic Matthey on 2013-05-21
Copyright (c) 2013 . All rights reserved.
"""
import os
import imp
from dataio import *
from datapbs import *
from submitpbs import *
import jobwrapper
def launcher_do_generate_submit_pbs_from_param_files(args):
    '''
    Generate a series of parameters to be run with PBS.
    Quite general, takes its parameters from a provided parameter_files file.
    (assume this is a .py file, which will be imported dynamically)
    If this parameter_file defines a filtering function, uses it.

    Parameters:
        args: argparse.Namespace-like object. vars(args) must contain
              'parameters_filename' (path to a .py parameter file) and
              'output_directory'.

    Returns:
        locals() -- the entire local namespace of this call, so callers can
        pick out 'outputs_submission', 'dataio', etc.
        NOTE(review): because locals() is both returned and passed to
        save_variables, the local variable names in this function are part of
        its de-facto interface -- do not rename them casually.
    '''
    print "launcher_do_generate_submit_pbs_from_param_files, generating parameters..."

    # All launcher options as a plain dict.
    all_parameters = vars(args)

    # Load the parameters from the specific file, fancyyyyy
    # NOTE(review): assert statements are stripped under `python -O`; an
    # explicit `raise ValueError(...)` would be more robust for validation.
    assert 'parameters_filename' in all_parameters and len(all_parameters['parameters_filename'])>0, "Parameters_filename is not set properly..."

    # Dynamically import the parameter file as a module named 'params'.
    # (imp is deprecated in Python 3; this is a Python 2 codebase.)
    parameters_file = imp.load_source('params', all_parameters['parameters_filename'])

    ##### Now generate the parameters combinations and submit everything to PBS
    submit_pbs = SubmitPBS(pbs_submission_infos=parameters_file.pbs_submission_infos, debug=True)
    outputs_submission = submit_pbs.generate_submit_constrained_parameters_from_module_parameters(parameters_file)

    # Label outputs with the parameter filename, extension stripped.
    # NOTE(review): splitext keeps any directory prefix -- presumably the
    # filename is given without a path; confirm against callers.
    dataio = DataIO(output_folder=all_parameters['output_directory'], label=os.path.splitext(all_parameters['parameters_filename'])[0])

    # Keep a link to the parameter file next to the simulation outputs,
    # so each run records exactly which parameters produced it.
    dataio.make_link_in_directory(source_file=all_parameters['parameters_filename'], output_dir=parameters_file.pbs_submission_infos['simul_out_dir'])

    # Persist the submission outputs and the launcher parameters to disk.
    variables_to_save = ['outputs_submission', 'all_parameters']
    dataio.save_variables(variables_to_save, locals())

    return locals()
def launcher_do_reload_constrained_parameters(args):
    '''
    Reload outputs run with the automatic parameter generator for PBS
    Should handle random sampling of the parameter space.

    Parameters:
        args: argparse.Namespace-like object. vars(args) must contain
              'parameters_filename' (path to a .py parameter file defining
              a `dataset_infos` dict with at least 'post_processing' and
              'launcher_module' keys).

    Returns:
        locals() -- the entire local namespace, so callers can access
        'data_pbs', 'post_processing_outputs', etc.
        NOTE(review): returning locals() makes the local variable names part
        of the de-facto interface -- do not rename them casually.
    '''
    all_parameters = vars(args)

    # Load the parameters from the specific file
    parameters_file = imp.load_source('params', all_parameters['parameters_filename'])

    # Reload everything
    data_pbs = DataPBS(dataset_infos=parameters_file.dataset_infos, debug=True)

    # Do the plots
    post_processing_outputs = []
    if parameters_file.dataset_infos['post_processing'] is not None:
        try:
            # Duck typing to check if we have a list of post_processings
            iterator = iter(parameters_file.dataset_infos['post_processing'])
        except TypeError:
            # Not a list... just call it
            # NOTE: here the result is the callable's return value directly,
            # whereas the iterable branch accumulates results into a list.
            post_processing_outputs = parameters_file.dataset_infos['post_processing'](data_pbs, parameters_file.dataset_infos['launcher_module'])
        else:
            for post_process in iterator:
                # Call each one one after the other
                post_processing_outputs.append(post_process(data_pbs, parameters_file.dataset_infos['launcher_module']))

    return locals()
def launcher_do_run_job(args):
'''
Instantiate and run a JobWrapper for the given parameters.
'''
job_outputs = {}
all_parameters = vars(args)
# Create a job and run it
job = jobwrapper.JobWrapper(all_parameters, session_id=all_parameters['session_id'])
print "Completed:", job.check_completed()
if not job.check_completed():
try:
job_outputs = job.compute()
finally:
# Even if an exception arises, we need to write the syncing file...
if not job.check_completed():
job.complete_job()
# Print result
print "Result:", job.get_result()
return job_outputs