[SYSTEMML-1945] added --deploy-mode param to python scripts
Closes #681
krishnakalyan3 authored and nakul02 committed Oct 11, 2017
1 parent 13a0175 commit 8f786aa
Showing 3 changed files with 10 additions and 5 deletions.
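With this change, callers can pick the Spark deploy mode independently of the master URL. A minimal sketch of an invocation of the updated wrapper (not part of the commit; the -f flag, DML script path, and resource values are illustrative assumptions):

import subprocess

# Hypothetical call to the updated wrapper script. Only --master and
# --deploy-mode come from this commit; the rest is assumed for illustration.
cmd = ['python', 'bin/systemml-spark-submit.py',
       '--master', 'yarn',          # resource manager only
       '--deploy-mode', 'cluster',  # new flag: client or cluster
       '--driver-memory', '8G',
       '-f', 'hello.dml']           # hypothetical DML script
subprocess.check_call(cmd)
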
9 changes: 5 additions & 4 deletions bin/systemml-spark-submit.py
@@ -41,8 +41,8 @@ def default_jars(systemml_home):
     return target_jars, systemml_jar


-def spark_submit_entry(master, driver_memory, num_executors, executor_memory,
-                       executor_cores, conf,
+def spark_submit_entry(master, deploy_mode, driver_memory, num_executors,
+                       executor_memory, executor_cores, conf,
                        nvargs, args, config, explain, debug, stats, gpu, f):
     """
     This function is responsible for the execution of arguments via
@@ -100,7 +100,7 @@ def spark_submit_entry(master, driver_memory, num_executors, executor_memory,

     # stats, explain, target_jars
     cmd_spark = [spark_path, '--class', 'org.apache.sysml.api.DMLScript',
-                 '--master', master,
+                 '--master', master, '--deploy-mode', deploy_mode,
                  '--driver-memory', driver_memory,
                  '--conf', default_conf,
                  '--jars', cuda_jars, systemml_jars]
@@ -129,7 +129,8 @@ def spark_submit_entry(master, driver_memory, num_executors, executor_memory,
     cparser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                       description='System-ML Spark Submit Script')
     # SPARK-SUBMIT Options
-    cparser.add_argument('--master', default='local[*]', help='local, yarn-client, yarn-cluster', metavar='')
+    cparser.add_argument('--master', default='local[*]', help='local, yarn', metavar='')
+    cparser.add_argument('--deploy-mode', help='client, cluster', default='client', metavar='')
     cparser.add_argument('--driver-memory', default='8G', help='Memory for driver (e.g. 512M, 1G)', metavar='')
     cparser.add_argument('--num-executors', nargs=1, help='Number of executors to launch', metavar='')
     cparser.add_argument('--executor-memory', nargs=1, help='Memory per executor', metavar='')
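Because argparse rewrites dashes in long option names to underscores, the new --deploy-mode flag surfaces as the key 'deploy_mode' once the parsed namespace is turned into a dict, which is the name the perftest utilities below rely on. A self-contained sketch of that behavior (assumed for illustration, not code from this commit):

import argparse

cparser = argparse.ArgumentParser()
cparser.add_argument('--master', default='local[*]', help='local, yarn', metavar='')
cparser.add_argument('--deploy-mode', default='client', help='client, cluster', metavar='')

# '--deploy-mode' becomes the 'deploy_mode' key after parsing
args = vars(cparser.parse_args(['--deploy-mode', 'cluster']))
print(args['deploy_mode'])  # prints: cluster
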
3 changes: 2 additions & 1 deletion scripts/perftest/python/run_perftest.py
@@ -355,7 +355,8 @@ def perf_test_entry(family, algo, exec_type, mat_type, mat_shape, config_dir, mo
                         'set <force> option to skip conservative memory estimates '
                         'and use GPU wherever possible', nargs='?', const='no_option')
     # Spark Configuration Option
-    cparser.add_argument('--master', help='local, yarn-client, yarn-cluster', metavar='')
+    cparser.add_argument('--master', help='local, yarn', metavar='')
+    cparser.add_argument('--deploy-mode', help='client, cluster', metavar='')
     cparser.add_argument('--driver-memory', help='Memory for driver (e.g. 512M)', metavar='')
     cparser.add_argument('--num-executors', help='Number of executors to launch', metavar='')
     cparser.add_argument('--executor-memory', help='Memory per executor', metavar='')
3 changes: 3 additions & 0 deletions scripts/perftest/python/utils_misc.py
@@ -84,6 +84,9 @@ def split_config_args(args):
     if args['master'] is not None:
         backend_args_dict['--master'] = args['master']

+    if args['deploy_mode'] is not None:
+        backend_args_dict['--deploy-mode'] = args['deploy_mode']
+
     if args['num_executors'] is not None:
         backend_args_dict['--num-executors'] = args['num_executors']

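A minimal stand-in for split_config_args (a simplified assumption about the surrounding code, not the full implementation) showing how the new key is expected to be forwarded to spark-submit alongside --master:

def split_config_args(args):
    # Simplified sketch: map parsed script options onto spark-submit flags.
    backend_args_dict = {}
    if args.get('master') is not None:
        backend_args_dict['--master'] = args['master']
    if args.get('deploy_mode') is not None:
        backend_args_dict['--deploy-mode'] = args['deploy_mode']
    return backend_args_dict

print(split_config_args({'master': 'yarn', 'deploy_mode': 'cluster'}))
# {'--master': 'yarn', '--deploy-mode': 'cluster'}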
