Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions .github/actions/mac/dllib/mac-dllib-scalatest-spark3/action.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
name: 'Mac Dllib ScalaTest Spark3.1'
description: 'Mac Dllib ScalaTest Spark3.1'
name: 'Mac Dllib ScalaTest Spark3.4'
description: 'Mac Dllib ScalaTest Spark3.4'
runs:
using: "composite"
steps:
Expand All @@ -12,9 +12,9 @@ runs:
export KERAS_BACKEND=tensorflow
rm /var/root/.m2/repository/io/netty/netty-common/4.1.50.Final/netty-common-4.1.50.Final.jar
cd scala
mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.estimator" test -P spark_3.x -Dspark.version=3.1.3 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly' && \
mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.nnframes" test -P spark_3.x -Dspark.version=3.1.3 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly' && \
mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.keras" test -P spark_3.x -Dspark.version=3.1.3 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly'
mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.estimator" test -P spark_3.x -Dspark.version=3.4.1 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly' && \
mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.nnframes" test -P spark_3.x -Dspark.version=3.4.1 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly' && \
mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.keras" test -P spark_3.x -Dspark.version=3.4.1 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly'
cd -
conda deactivate
env:
Expand Down
30 changes: 15 additions & 15 deletions .github/workflows/license-scan.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,26 +44,26 @@ jobs:
if: ${{ failure() }}
run: |
ls
#spark3.1.3
#spark3.4.1
sed -i 's/<artifactId>${spark-version.project}<\/artifactId>/<artifactId>${spark-version.project}-${SPARK_PLATFORM}<\/artifactId>/' scala/dllib/pom.xml
sed -i 's/<artifactId>3.0<\/artifactId>/<artifactId>3.0-${SPARK_PLATFORM}<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/common/spark-version/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/dllib/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/orca/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/friesian/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/grpc/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/serving/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/ppml/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/assembly/pom.xml
mvn dependency:tree -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} -Dspark.version=3.1.3 -DSPARK_PLATFORM=SPARK_3.1 -P spark_3.x --file scala/pom.xml
mvn clean package -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} -DskipTests -Dspark.version=3.1.3 -DSPARK_PLATFORM=SPARK_3.1 -P spark_3.x --file scala/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/common/spark-version/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/dllib/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/orca/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/friesian/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/grpc/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/serving/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/ppml/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/assembly/pom.xml
mvn dependency:tree -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} -Dspark.version=3.4.1 -DSPARK_PLATFORM=SPARK_3.4 -P spark_3.x --file scala/pom.xml
mvn clean package -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} -DskipTests -Dspark.version=3.4.1 -DSPARK_PLATFORM=SPARK_3.4 -P spark_3.x --file scala/pom.xml
echo "ls -d scala/assembly/target/*"
ls -d scala/assembly/target/*
echo ""
echo "zipinfo -1 scala/assembly/target/bigdl-assembly-spark_3.1.3-2.2.0-SNAPSHOT-dist-all.zip 'jars/*.jar'"
zipinfo -1 scala/assembly/target/bigdl-assembly-spark_3.1.3-2.2.0-SNAPSHOT-dist-all.zip 'jars/*.jar'
echo "zipinfo -1 scala/assembly/target/bigdl-assembly-spark_3.4.1-2.4.0-SNAPSHOT-dist-all.zip 'jars/*.jar'"
zipinfo -1 scala/assembly/target/bigdl-assembly-spark_3.4.1-2.4.0-SNAPSHOT-dist-all.zip 'jars/*.jar'

- name: Create Job Badge
if: ${{ always() }}
Expand Down
65 changes: 34 additions & 31 deletions .github/workflows/nightly_build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ jobs:
- name: Build with Maven
run: |
ls
#spark3.1.3
#spark3.4.1
cp scala/pom.xml scala/pom.xml.origin
cp scala/common/spark-version/pom.xml scala/common/spark-version/pom.xml.origin
cp scala/common/spark-version/3.0/pom.xml scala/common/spark-version/3.0/pom.xml.origin
Expand All @@ -106,17 +106,17 @@ jobs:

sed -i 's/<artifactId>${spark-version.project}<\/artifactId>/<artifactId>${spark-version.project}-${SPARK_PLATFORM}<\/artifactId>/' scala/dllib/pom.xml
sed -i 's/<artifactId>3.0<\/artifactId>/<artifactId>3.0-${SPARK_PLATFORM}<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/common/spark-version/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/dllib/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/orca/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/friesian/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/grpc/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/serving/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/ppml/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/assembly/pom.xml
mvn -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} clean deploy -DskipTests -Dspark.version=3.1.3 -DSPARK_PLATFORM=SPARK_3.1 -P spark_3.x -P sign --file scala/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/common/spark-version/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/dllib/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/orca/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/friesian/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/grpc/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/serving/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/ppml/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/assembly/pom.xml
mvn -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} clean deploy -DskipTests -Dspark.version=3.4.1 -DSPARK_PLATFORM=SPARK_3.4 -P spark_3.x -P sign --file scala/pom.xml

mv scala/pom.xml.origin scala/pom.xml
mv scala/common/spark-version/pom.xml.origin scala/common/spark-version/pom.xml
Expand Down Expand Up @@ -198,52 +198,54 @@ jobs:
export IMAGE=intelanalytics/bigdl
cd docker/bigdl
echo "########################################"
echo "################# bigdl 3.1.3 #######"
echo "################# bigdl 3.4.1 #######"
echo "########################################"
docker build \
--build-arg http_proxy=${HTTP_PROXY} \
--build-arg https_proxy=${HTTPS_PROXY} \
--build-arg SPARK_VERSION=3.1.3 \
--build-arg SPARK_VERSION=3.4.1 \
--build-arg HADOOP_VERSION=3 \
--build-arg JDK_VERSION=8u192 \
--build-arg JDK_URL=${JDK_URL} \
--build-arg no_proxy=${NO_PROXY} \
--rm --no-cache -t $IMAGE-spark-3.1.3:${TAG} .
--rm --no-cache -t $IMAGE-spark-3.4.1:${TAG} .
# tag 'latest'
docker push ${IMAGE}-spark-3.1.3:${TAG}
docker tag ${IMAGE}-spark-3.1.3:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG}
docker push 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG}
docker push ${IMAGE}-spark-3.4.1:${TAG}
docker tag ${IMAGE}-spark-3.4.1:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG}
docker push 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG}
# tag SNAPSHOT
export TAG_SNAPSHOT=2.4.0-SNAPSHOT
docker tag 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}-spark-3.1.3:${TAG_SNAPSHOT}
docker push ${IMAGE}-spark-3.1.3:${TAG_SNAPSHOT}
docker rmi -f ${IMAGE}-spark-3.1.3:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}-spark-3.1.3:${TAG_SNAPSHOT}
docker tag 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}-spark-3.4.1:${TAG_SNAPSHOT}
docker push ${IMAGE}-spark-3.4.1:${TAG_SNAPSHOT}
docker rmi -f ${IMAGE}-spark-3.4.1:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}-spark-3.4.1:${TAG_SNAPSHOT}
- name: docker deploy bigdl-k8s
run: |
cd docker/bigdl-k8s
export IMAGE=intelanalytics/bigdl-k8s
export TAG=latest
echo "########################################"
echo "################# bigdl-k8s 3.1.3 #######"
echo "################# bigdl-k8s 3.4.1 #######"
echo "########################################"
docker build \
--build-arg http_proxy=${HTTP_PROXY} \
--build-arg https_proxy=${HTTPS_PROXY} \
--build-arg SPARK_VERSION=3.1.3 \
--build-arg SPARK_VERSION=3.4.1 \
--build-arg HADOOP_VERSION=3 \
--build-arg JDK_VERSION=8u192 \
--build-arg JDK_URL=${JDK_URL} \
--build-arg no_proxy=${NO_PROXY} \
--rm --no-cache -t ${IMAGE}-spark-3.1.3:${TAG} .
--rm --no-cache -t ${IMAGE}-spark-3.4.1:${TAG} .
# tag 'latest'
docker push ${IMAGE}-spark-3.1.3:${TAG}
docker tag ${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}:${TAG}
docker push ${IMAGE}-spark-3.4.1:${TAG}
docker tag ${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}:${TAG}
docker push ${IMAGE}:${TAG}
docker tag ${IMAGE}:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG}
docker push 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG}
docker tag ${IMAGE}:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG}
docker push 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG}
# tag SNAPSHOT
export TAG_SNAPSHOT=2.4.0-SNAPSHOT
docker tag 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}:${TAG_SNAPSHOT}
docker tag 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}:${TAG_SNAPSHOT}
docker push ${IMAGE}:${TAG_SNAPSHOT}
docker rmi -f ${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}:${TAG_SNAPSHOT}
docker rmi -f ${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}:${TAG_SNAPSHOT}
- name: docker deploy bigdl-chronos
run: |
mv docker/chronos-nightly/Dockerfile ./Dockerfile
Expand Down Expand Up @@ -280,7 +282,8 @@ jobs:
--build-arg JDK_VERSION=8u192 \
--build-arg JDK_URL=${JDK_URL} \
--build-arg no_proxy=${NO_PROXY} \
--build-arg SPARK_VERSION=3.1.3 \
--build-arg SPARK_VERSION=3.4.1 \
--build-arg HADOOP_VERSION=3 \
--build-arg PY4J_VERSION=0.10.9 \
--rm --no-cache -t ${IMAGE}:${TAG} .
# tag 'latest'
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/sdl_snyk_scala.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,14 +31,14 @@ jobs:
cd scala
snyk monitor --all-projects
cd -
- name: "run Snyk test scan Spark 3.1.3"
- name: "run Snyk test scan Spark 3.4.1"
env:
no_proxy: snyk.devtools.intel.com, intel.com
run: |
sed -i 's/<scala.major.version>2.11<\/scala.major.version>/<scala.major.version>2.12<\/scala.major.version>/' scala/pom.xml
sed -i 's/<scala.version>2.11.12<\/scala.version>/<scala.version>2.12.10<\/scala.version>/' scala/pom.xml
sed -i 's/<spark-version.project>2.0<\/spark-version.project>/<spark-version.project>3.0<\/spark-version.project>/' scala/pom.xml
sed -i 's/<spark.version>2.4.6<\/spark.version>/<spark.version>3.1.3<\/spark.version>/' scala/pom.xml
sed -i 's/<spark.version>2.4.6<\/spark.version>/<spark.version>3.4.1<\/spark.version>/' scala/pom.xml
cd scala
snyk monitor --all-projects
cd -
Expand Down
2 changes: 1 addition & 1 deletion docker/bigdl-k8s/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ RUN apt-get update --fix-missing && \

ADD ./log4j2.xml ${SPARK_HOME}/conf/log4j2.xml
RUN ln -fs /bin/bash /bin/sh
RUN if [ $SPARK_VERSION = "3.1.3" ]; then \
RUN if [ $SPARK_VERSION = "3.4.1" ]; then \
rm $SPARK_HOME/jars/okhttp-*.jar && \
wget -P $SPARK_HOME/jars https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.8.0/okhttp-3.8.0.jar; \
elif [ $SPARK_VERSION = "2.4.6" ]; then \
Expand Down
11 changes: 6 additions & 5 deletions docker/bigdl/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
#

ARG SPARK_VERSION=2.4.6
ARG HADOOP_VERSION=3
ARG SPARK_HOME=/opt/spark
ARG JDK_VERSION=8u192
ARG JDK_URL=your_jdk_url
Expand All @@ -40,10 +41,10 @@ RUN apt-get update --fix-missing && \
mv /opt/jdk* /opt/jdk$JDK_VERSION && \
ln -s /opt/jdk$JDK_VERSION /opt/jdk && \
# spark
wget https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
tar -zxvf spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
mv spark-${SPARK_VERSION}-bin-hadoop2.7 /opt/spark && \
rm spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
wget https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
tar -zxvf spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
mv spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION} /opt/spark && \
rm spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
cp /opt/spark/kubernetes/dockerfiles/spark/entrypoint.sh /opt && \
# remove log4j 1.x jars
rm -f ${SPARK_HOME}/jars/log4j-1.2.17.jar && \
Expand All @@ -58,7 +59,7 @@ RUN apt-get update --fix-missing && \
ADD ./log4j2.xml ${SPARK_HOME}/conf/log4j2.xml
ADD ./spark-defaults.conf ${SPARK_HOME}/conf/spark-defaults.conf
RUN ln -fs /bin/bash /bin/sh
RUN if [ $SPARK_VERSION = "3.1.3" ]; then \
RUN if [ $SPARK_VERSION = "3.4.1" ]; then \
rm $SPARK_HOME/jars/okhttp-*.jar && \
wget -P $SPARK_HOME/jars https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.8.0/okhttp-3.8.0.jar; \
elif [ $SPARK_VERSION = "2.4.6" ]; then \
Expand Down
9 changes: 5 additions & 4 deletions docker/friesian-serving/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -21,16 +21,17 @@ ARG BIGDL_VERSION=2.4.0-SNAPSHOT
# stage.1 spark
FROM ubuntu:20.04 as spark
ARG SPARK_VERSION=2.4.6
ARG HADOOP_VERSION=3
ARG SPARK_HOME=/opt/spark

RUN apt-get update --fix-missing && \
apt-get install -y --no-install-recommends apt-utils wget && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* && \
wget --progress=dot:giga --no-check-certificate https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
tar -zxvf spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
mv spark-${SPARK_VERSION}-bin-hadoop2.7 /opt/spark && \
rm spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
wget --progress=dot:giga --no-check-certificate https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
tar -zxvf spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
mv spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION} /opt/spark && \
rm spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
# remove log4j 1.x jars
rm -f ${SPARK_HOME}/jars/log4j-1.2.17.jar && \
rm -f ${SPARK_HOME}/jars/slf4j-log4j12-1.7.16.jar && \
Expand Down
Loading