From d022598c4a21f29ffa377af76261fea361fddba1 Mon Sep 17 00:00:00 2001
From: ringtail
Date: Fri, 8 May 2020 21:47:16 +0800
Subject: [PATCH] change entrypoint of history server

---
 .../spark/historyserver/Dockerfile            |  4 +-
 .../spark/historyserver/entrypoint.sh         | 90 +++----------------
 2 files changed, 14 insertions(+), 80 deletions(-)

diff --git a/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/historyserver/Dockerfile b/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/historyserver/Dockerfile
index a0af1271129cf..2b9fe285fc80c 100644
--- a/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/historyserver/Dockerfile
+++ b/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/historyserver/Dockerfile
@@ -3,8 +3,10 @@ FROM $base_img
 # Add dependency for alibaba cloud
 ADD https://repo1.maven.org/maven2/com/aliyun/odps/hadoop-fs-oss/3.3.8-public/hadoop-fs-oss-3.3.8-public.jar $SPARK_HOME/jars
 ADD https://repo1.maven.org/maven2/com/aliyun/oss/aliyun-sdk-oss/3.8.1/aliyun-sdk-oss-3.8.1.jar $SPARK_HOME/jars
-
+ADD https://repo1.maven.org/maven2/org/aspectj/aspectjweaver/1.9.5/aspectjweaver-1.9.5.jar $SPARK_HOME/jars
+ADD https://repo1.maven.org/maven2/org/jdom/jdom/1.1.3/jdom-1.1.3.jar $SPARK_HOME/jars
 # change default entrypoint
 ADD resource-managers/kubernetes/docker/src/main/dockerfiles/spark/historyserver/entrypoint.sh /opt/entrypoint.sh
+RUN chmod +x /opt/entrypoint.sh
 
 ENTRYPOINT [ "/opt/entrypoint.sh" ]
diff --git a/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/historyserver/entrypoint.sh b/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/historyserver/entrypoint.sh
index 972c55284e4f0..5f297fdf84e08 100644
--- a/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/historyserver/entrypoint.sh
+++ b/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/historyserver/entrypoint.sh
@@ -3,94 +3,26 @@
 # echo commands to the terminal output
 set -ex
 
-enablePVC=
-
-enableOSS=
-alibabaCloudOSSEndpoint=
-alibabaCloudAccessKeyId=
-alibabaCloudAccessKeySecret=
-
-eventsDir=
-
-function usage {
-  cat<< EOF
-  Usage: entrypoint.sh [OPTIONS]
-  Options:
-  --pvc Enable PVC
-  --oss accessKeyId accessKeySecret ossEndpoint Enable Alibaba Cloud OSS
-  --events-dir events-dir Set events dir
-  -h | --help Prints this message.
-EOF
-}
-
-function parse_args {
-  while [[ $# -gt 0 ]]
-  do
-    case "$1" in
-      --pvc)
-        enablePVC=true
-        shift
-        continue
-        ;;
-      --oss)
-        if [[ -n "$4" ]]; then
-          enableOSS=true
-          alibabaCloudAccessKeyId=$2
-          alibabaCloudAccessKeySecret=$3
-          alibabaCloudOSSEndpoint=$4
-          shift 4
-          continue
-        else
-          printf '"--alibaba" require four non-empty option arguments.\n'
-          usage
-          exit 1
-        fi
-        ;;
-      --events-dir)
-        if [[ -n "$2" ]]; then
-          eventsDir=$2
-          shift 2
-          continue
-        else
-          printf '"--events-dir" requires a non-empty option argument.\n'
-          usage
-          exit 1
-        fi
-        ;;
-      -h|--help)
-        usage
-        exit 0
-        ;;
-      --)
-        shift
-        break
-        ;;
-      '')
-        break
-        ;;
-      *)
-        printf "Unrecognized option: $1\n"
-        exit 1
-        ;;
-    esac
-    shift
-  done
-}
-
-parse_args "$@"
-
 if [[ "$enablePVC" == "true" ]]; then
   export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=file:/mnt/$eventsDir";
 elif [[ "$enableOSS" == "true" ]];then
   export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS \
     -Dspark.history.fs.logDirectory=$eventsDir \
     -Dspark.hadoop.fs.oss.endpoint=$alibabaCloudOSSEndpoint \
-    -Dspark.hadoop.fs.oss.accessKeySecret=$alibabaCloudAccessKeyId \
-    -Dspark.hadoop.fs.oss.accessKeyId=$alibabaCloudAccessKeySecret \
+    -Dspark.hadoop.fs.oss.accessKeySecret=$alibabaCloudAccessKeySecret \
+    -Dspark.hadoop.fs.oss.accessKeyId=$alibabaCloudAccessKeyId \
     -Dspark.hadoop.fs.oss.impl=org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem";
 else
   export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS \
     -Dspark.history.fs.logDirectory=$eventsDir";
 fi;
 
-exec /sbin/tini -s -- /opt/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
+
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 "$@"
+
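
Editor's note (not part of the patch): with parse_args removed, entrypoint.sh no
longer accepts the --pvc/--oss/--events-dir flags; it now reads enablePVC,
enableOSS, eventsDir, alibabaCloudOSSEndpoint, alibabaCloudAccessKeyId, and
alibabaCloudAccessKeySecret straight from the environment, so the container must
be started with those variables set. A minimal sketch of an OSS-backed run,
assuming a hypothetical image tag and placeholder endpoint, bucket, and
credential values (18080 is the history server's default port):

    # Sketch only: image name, endpoint, bucket, and credentials are placeholders.
    # Variable names must match the lower-case names read by entrypoint.sh.
    docker run -d \
      -e enableOSS=true \
      -e eventsDir="oss://my-bucket/spark-events" \
      -e alibabaCloudOSSEndpoint="oss-cn-hangzhou.aliyuncs.com" \
      -e alibabaCloudAccessKeyId="<access-key-id>" \
      -e alibabaCloudAccessKeySecret="<access-key-secret>" \
      -p 18080:18080 \
      my-registry/spark-history-server:latest

One design caveat: spark-daemon.sh start normally forks the HistoryServer and
returns, which would terminate a container whose PID 1 is this script. Spark's
spark-daemon.sh honors SPARK_NO_DAEMONIZE to run the command in the foreground,
so that variable likely needs to be set in the image or the pod spec as well.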