From 19153436eea1a4d574fec34b72fbbf3077bf74d9 Mon Sep 17 00:00:00 2001
From: Sumit Awasthi
Date: Fri, 21 Jul 2023 13:32:14 -0700
Subject: [PATCH] Fix formatting

---
 src/smspark/bootstrapper.py | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/src/smspark/bootstrapper.py b/src/smspark/bootstrapper.py
index 2e83a96..ac1c46e 100644
--- a/src/smspark/bootstrapper.py
+++ b/src/smspark/bootstrapper.py
@@ -417,7 +417,9 @@ def get_yarn_spark_resource_config(
             # default parallelism
             default_parallelism = executor_count_total * executor_cores * 2
             # total memory for one executor on the instance, leave 1GB for the Hadoop daemons
-            total_executor_memory = int((reduced_instance_mem_mb - constants.HADOOP_DAEMONS_MEM_MB) / executor_count_per_instance)
+            total_executor_memory = int(
+                (reduced_instance_mem_mb - constants.HADOOP_DAEMONS_MEM_MB) / executor_count_per_instance
+            )
             # executor memory MB (90% of the total executor mem)
             executor_mem_mb = int(total_executor_memory * constants.EXECUTOR_MEM_INSTANCE_MEM_RATIO_ADV)
             # executor memory overhead MB (10% of the total executor mem)
@@ -426,11 +428,7 @@ def get_yarn_spark_resource_config(
             driver_mem_mb = executor_mem_mb
             driver_mem_overhead_mb = executor_mem_overhead_mb
         else:
-            raise ValueError(
-                "Could not determine Spark configuration mode: {}.".format(
-                    spark_config_mode
-                )
-            )
+            raise ValueError("Could not determine Spark configuration mode: {}.".format(spark_config_mode))
 
         driver_gc_config = (
             "-XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:MaxHeapFreeRatio=70 "
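
Note (not part of the patch): the hunk at line 417 sizes executors by reserving roughly 1 GB for the Hadoop daemons, splitting the remainder across the executors on the instance, and dividing each executor's share between heap and overhead per the "90%"/"10%" comments. Below is a minimal sketch of that arithmetic, assuming stand-in values for the constants; the function name and concrete numbers are illustrative, since the real constants are defined elsewhere in the package and are not shown in this hunk.

# Hedged sketch of the sizing arithmetic in the hunk above; names and constant
# values are illustrative assumptions, not the package's actual definitions.
HADOOP_DAEMONS_MEM_MB = 1024  # assumption: "leave 1GB for the Hadoop daemons"
EXECUTOR_MEM_INSTANCE_MEM_RATIO_ADV = 0.9  # assumption: "90% of the total executor mem"


def yarn_spark_memory_sketch(
    reduced_instance_mem_mb: int,
    executor_count_per_instance: int,
    executor_count_total: int,
    executor_cores: int,
) -> dict:
    # default parallelism: two tasks per executor core across the cluster
    default_parallelism = executor_count_total * executor_cores * 2
    # memory for one executor after reserving room for the Hadoop daemons
    total_executor_memory = int(
        (reduced_instance_mem_mb - HADOOP_DAEMONS_MEM_MB) / executor_count_per_instance
    )
    # split roughly 90/10 between executor heap and memory overhead
    # (the overhead is computed here as the remainder; the exact formula is not in the hunk)
    executor_mem_mb = int(total_executor_memory * EXECUTOR_MEM_INSTANCE_MEM_RATIO_ADV)
    executor_mem_overhead_mb = total_executor_memory - executor_mem_mb
    # in this branch the driver is sized the same way as an executor
    return {
        "default_parallelism": default_parallelism,
        "executor_mem_mb": executor_mem_mb,
        "executor_mem_overhead_mb": executor_mem_overhead_mb,
        "driver_mem_mb": executor_mem_mb,
        "driver_mem_overhead_mb": executor_mem_overhead_mb,
    }


# Example with hypothetical values: ~31 GB usable per instance, 1 executor per
# instance, 3 instances, 8 cores per executor.
print(yarn_spark_memory_sketch(31744, 1, 3, 8))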