Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
35 commits
Select commit Hold shift + click to select a range
5f43851
added data scale to output
ordavidov Nov 28, 2022
4333dbe
delegated sampling functions to legacy
ordavidov Nov 28, 2022
2d2f7cc
added logger and raise to generate_dataset
ordavidov Nov 28, 2022
aeb591a
module for Hit & Run sampling from polytopes
ordavidov Nov 28, 2022
66fe36e
added X sampler based on Hit & Run
ordavidov Nov 28, 2022
f4ecc09
notebook for gaussian sampling in polytopes
ordavidov Nov 28, 2022
542b630
add plot in test
ordavidov Nov 28, 2022
5913899
notebook to sample in 2D, 3D and higher + tests and visualizations
ordavidov Nov 28, 2022
62e325b
improve plotting, change support to in_domain
ordavidov Nov 28, 2022
a06c592
fix warm_sigma and round_threshold in X_sampler + add tol
ordavidov Nov 28, 2022
4869538
remove comments
ordavidov Nov 28, 2022
65d82ca
add is_raised for assert errors
ordavidov Nov 29, 2022
8d60986
move mock to get_extra_input and add data_num_cpus
ordavidov Nov 29, 2022
d2e7214
add is_raised arg and adapt
ordavidov Nov 29, 2022
a080d36
add is_raised arg and adapt
ordavidov Nov 29, 2022
e3f0eae
add is_raised arg and adapt
ordavidov Nov 29, 2022
7053fcc
add is_raised arg and adapt
ordavidov Nov 29, 2022
ddf9f0e
absolute path for configs file in args
ordavidov Nov 29, 2022
2cbf5cf
update log
ordavidov Nov 29, 2022
4ea94d9
num_cpus to kwargs
ordavidov Nov 29, 2022
b7364e0
add documentation
ordavidov Nov 30, 2022
7d61d87
in main send to _dest bucket
ordavidov Nov 30, 2022
4860885
process specific num_cpus for f_dist.
ordavidov Nov 30, 2022
3cfbf4d
Pool from ray.util.multiprocessing
ordavidov Nov 30, 2022
782f121
epsilon pad constraints and remove nan from D
ordavidov Nov 30, 2022
ac3144c
specifying omega scale is now optional
ordavidov Nov 30, 2022
0bcf7a6
epsilon pad constraints for hit and run to ensure D is safely in dom(f)
ordavidov Nov 30, 2022
a17ae72
change tolerance for orientation to ensure no zeros
ordavidov Nov 30, 2022
24f6677
fix tolerance bug
ordavidov Nov 30, 2022
28a32aa
update logs
ordavidov Nov 30, 2022
9e9d578
update logs to include NaN report
ordavidov Nov 30, 2022
8875e89
objectives-dest in main
ordavidov Nov 30, 2022
f995721
solutions-dest in main
ordavidov Nov 30, 2022
5e2e81b
add data_num_cpus kwarg
ordavidov Nov 30, 2022
6cde62d
increased epsilon for epsilon constraints
ordavidov Nov 30, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
41 changes: 24 additions & 17 deletions doframework/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,8 @@
'mock',
'after_idle_for',
'rayvens_logs',
'alg_num_cpus'
'alg_num_cpus',
'data_num_cpus'
]
)

Expand Down Expand Up @@ -194,20 +195,20 @@ def _number_of_iterations(process_input, args, process_type):
def _get_extra_input(input_name, process_type, configs, args, buckets):
try:
if process_type == 'input':
extra = {}
extra = {'mock': args.mock}
elif process_type == 'objective':
extra = {}
extra = {'mock': args.mock}
elif process_type == 'data':
files = files_from_data(input_name)
storage = Storage(configs)
objective = storage.get(buckets['objectives_dest'],files['objective'])
extra = {'objective': json.load(objective)}
extra = {'num_cpus': args.data_num_cpus, 'objective': json.load(objective), 'mock': args.mock}
elif process_type == 'solution':
files = files_from_solution(input_name)
storage = Storage(configs)
objective = storage.get(buckets['objectives_dest'],files['objective'])
data = storage.get(buckets['data_dest'],files['data'])
extra = {'is_mcmc': args.mcmc, 'objective': json.load(objective), 'data': pd.read_csv(data)}
extra = {'is_mcmc': args.mcmc, 'objective': json.load(objective), 'data': pd.read_csv(data), 'mock': args.mock}
else:
extra = None
return extra
Expand All @@ -222,10 +223,19 @@ def _get_extra_input(input_name, process_type, configs, args, buckets):

def _process(process_type, configs, args, buckets, **kwargs):
def proc(f):

@ray.remote(num_cpus=1)
def f_dist(*args,**kwargs):
return f(*args,**kwargs)

if process_type == 'data':
@ray.remote(num_cpus=args.data_num_cpus)
def f_dist(*args,**kwargs):
return f(*args,**kwargs)
elif process_type == 'solution':
@ray.remote(num_cpus=args.alg_num_cpus)
def f_dist(*args,**kwargs):
return f(*args,**kwargs)
else:
@ray.remote(num_cpus=1)
def f_dist(*args,**kwargs):
return f(*args,**kwargs)

def inner(context, event):

Expand Down Expand Up @@ -253,12 +263,8 @@ def inner(context, event):

extra = _get_extra_input(input_name, process_type, configs, args, buckets)
assert extra is not None, 'Extra input is None for event {}.'.format(input_name)
if args.logger:
if extra:
print('({}) INFO ... Process working on event {} uses extra input {}.'.format(process_type,input_name,list(extra.keys())))
else:
print('({}) INFO ... Process working on event {} does not require extra input.'.format(process_type,input_name))
extra = {**extra,**kwargs,**configs,**{'mock': args.mock}}

extra = {**extra,**kwargs,**configs}

if args.distribute:
_ = [f_dist.remote(context, process_input, input_name, **extra) for _ in range(n)]
Expand Down Expand Up @@ -311,8 +317,9 @@ def run(generate_user_solution, configs_file, **kwargs):
after_idle_for = kwargs['after_idle_for'] if 'after_idle_for' in kwargs else 200
rayvens_logs = kwargs['rayvens_logs'] if 'rayvens_logs' in kwargs else False
alg_num_cpus = int(kwargs['alg_num_cpus']) if 'alg_num_cpus' in kwargs else 1
data_num_cpus = int(kwargs['data_num_cpus']) if 'data_num_cpus' in kwargs else 1

args = Args(objectives, datasets, feasibility_regions, run_mode, distribute, mcmc, logger, mock, after_idle_for, rayvens_logs, alg_num_cpus)
args = Args(objectives, datasets, feasibility_regions, run_mode, distribute, mcmc, logger, mock, after_idle_for, rayvens_logs, alg_num_cpus, data_num_cpus)

if args.run_mode == 'operator':
ray.init(address='auto')
Expand Down Expand Up @@ -356,7 +363,7 @@ def generate_datasets(context, process_input, input_name, **kwargs):
else:
print('({}) ERROR ... generated dataset {} not published to context. Either `s3` or `local` missing from configs, or both.'.format('objective',generated_file))

@ray.remote(num_cpus=args.alg_num_cpus)
@ray.remote(num_cpus=1)
@_process('data', configs, args, buckets, **kwargs)
def generate_solutions(context, process_input, input_name, **kwargs):
solution, generated_file = generate_user_solution(process_input, input_name, **kwargs)
Expand Down
Loading