diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template
index e2071e2ade8..6432a566089 100755
--- a/conf/spark-env.sh.template
+++ b/conf/spark-env.sh.template
@@ -19,3 +19,4 @@
 # - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT
 # - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
 # - SPARK_WORKER_DIR, to set the working directory of worker processes
+# - SPARK_PUBLIC_DNS, to set the public dns name of the master
diff --git a/sbin/start-master.sh b/sbin/start-master.sh
index 3dcf7cc3483..ec3dfdb4197 100755
--- a/sbin/start-master.sh
+++ b/sbin/start-master.sh
@@ -40,13 +40,4 @@ if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
   SPARK_MASTER_WEBUI_PORT=8080
 fi
 
-# Set SPARK_PUBLIC_DNS so the master report the correct webUI address to the slaves
-if [ "$SPARK_PUBLIC_DNS" = "" ]; then
-  # If we appear to be running on EC2, use the public address by default:
-  # NOTE: ec2-metadata is installed on Amazon Linux AMI. Check based on that and hostname
-  if command -v ec2-metadata > /dev/null || [[ `hostname` == *ec2.internal ]]; then
-    export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname`
-  fi
-fi
-
 "$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
diff --git a/sbin/start-slave.sh b/sbin/start-slave.sh
index 524be38c629..b563400dc24 100755
--- a/sbin/start-slave.sh
+++ b/sbin/start-slave.sh
@@ -23,13 +23,4 @@
 sbin=`dirname "$0"`
 sbin=`cd "$sbin"; pwd`
 
-# Set SPARK_PUBLIC_DNS so slaves can be linked in master web UI
-if [ "$SPARK_PUBLIC_DNS" = "" ]; then
-  # If we appear to be running on EC2, use the public address by default:
-  # NOTE: ec2-metadata is installed on Amazon Linux AMI. Check based on that and hostname
-  if command -v ec2-metadata > /dev/null || [[ `hostname` == *ec2.internal ]]; then
-    export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname`
-  fi
-fi
-
 "$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"