
Commit cbb06af

spark 3.4 scala nightly & docker action (#8583)
* spark 3.4 scala nightly
* docker action and dockerfile
* update
1 parent f015eee commit cbb06af

File tree

8 files changed: +74 −68 lines changed


.github/actions/mac/dllib/mac-dllib-scalatest-spark3/action.yml

Lines changed: 5 additions & 5 deletions
@@ -1,5 +1,5 @@
-name: 'Mac Dllib ScalaTest Spark3.1'
-description: 'Mac Dllib ScalaTest Spark3.1'
+name: 'Mac Dllib ScalaTest Spark3.4'
+description: 'Mac Dllib ScalaTest Spark3.4'
 runs:
   using: "composite"
   steps:
@@ -12,9 +12,9 @@ runs:
       export KERAS_BACKEND=tensorflow
       rm /var/root/.m2/repository/io/netty/netty-common/4.1.50.Final/netty-common-4.1.50.Final.jar
       cd scala
-      mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.estimator" test -P spark_3.x -Dspark.version=3.1.3 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly' && \
-      mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.nnframes" test -P spark_3.x -Dspark.version=3.1.3 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly' && \
-      mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.keras" test -P spark_3.x -Dspark.version=3.1.3 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly'
+      mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.estimator" test -P spark_3.x -Dspark.version=3.4.1 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly' && \
+      mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.nnframes" test -P spark_3.x -Dspark.version=3.4.1 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly' && \
+      mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.keras" test -P spark_3.x -Dspark.version=3.4.1 -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly'
       cd -
       conda deactivate
     env:
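
The same suites can be reproduced by hand outside CI. A minimal sketch, assuming a local BigDL checkout with Maven on the PATH (only the estimator suite is shown; the nnframes and keras suites follow the identical pattern):

# From the repository root: run one dllib test suite against Spark 3.4.1,
# mirroring the action's mvn call; -pl excludes every module except dllib.
cd scala
mvn "-DwildcardSuites=com.intel.analytics.bigdl.dllib.estimator" test \
    -P spark_3.x -Dspark.version=3.4.1 \
    -pl '!orca,!friesian,!grpc,!serving,!ppml,!assembly'
cd -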

.github/workflows/license-scan.yml

Lines changed: 15 additions & 15 deletions
@@ -44,26 +44,26 @@ jobs:
         if: ${{ failure() }}
         run: |
           ls
-          #spark3.1.3
+          #spark3.4.1
           sed -i 's/<artifactId>${spark-version.project}<\/artifactId>/<artifactId>${spark-version.project}-${SPARK_PLATFORM}<\/artifactId>/' scala/dllib/pom.xml
           sed -i 's/<artifactId>3.0<\/artifactId>/<artifactId>3.0-${SPARK_PLATFORM}<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/common/spark-version/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/dllib/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/orca/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/friesian/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/grpc/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/serving/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/ppml/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/assembly/pom.xml
-          mvn dependency:tree -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} -Dspark.version=3.1.3 -DSPARK_PLATFORM=SPARK_3.1 -P spark_3.x --file scala/pom.xml
-          mvn clean package -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} -DskipTests -Dspark.version=3.1.3 -DSPARK_PLATFORM=SPARK_3.1 -P spark_3.x --file scala/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/common/spark-version/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/dllib/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/orca/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/friesian/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/grpc/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/serving/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/ppml/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/assembly/pom.xml
+          mvn dependency:tree -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} -Dspark.version=3.4.1 -DSPARK_PLATFORM=SPARK_3.4 -P spark_3.x --file scala/pom.xml
+          mvn clean package -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} -DskipTests -Dspark.version=3.4.1 -DSPARK_PLATFORM=SPARK_3.4 -P spark_3.x --file scala/pom.xml
           echo "ls -d scala/assembly/target/*"
           ls -d scala/assembly/target/*
           echo ""
-          echo "zipinfo -1 scala/assembly/target/bigdl-assembly-spark_3.1.3-2.2.0-SNAPSHOT-dist-all.zip 'jars/*.jar'"
-          zipinfo -1 scala/assembly/target/bigdl-assembly-spark_3.1.3-2.2.0-SNAPSHOT-dist-all.zip 'jars/*.jar'
+          echo "zipinfo -1 scala/assembly/target/bigdl-assembly-spark_3.4.1-2.4.0-SNAPSHOT-dist-all.zip 'jars/*.jar'"
+          zipinfo -1 scala/assembly/target/bigdl-assembly-spark_3.4.1-2.4.0-SNAPSHOT-dist-all.zip 'jars/*.jar'
 
       - name: Create Job Badge
         if: ${{ always() }}
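
All ten parent-artifactId substitutions write the same literal value, so the block above is one rule applied to ten poms. An equivalent sketch of that step (pom list copied verbatim from the workflow; the ${spark.version} in the pattern is a literal Maven placeholder, untouched inside single quotes):

# Pin bigdl-parent-spark_${spark.version} to bigdl-parent-spark_3.4.1 in every module pom.
for pom in scala/pom.xml scala/common/spark-version/pom.xml \
           scala/common/spark-version/3.0/pom.xml scala/dllib/pom.xml \
           scala/orca/pom.xml scala/friesian/pom.xml scala/grpc/pom.xml \
           scala/serving/pom.xml scala/ppml/pom.xml scala/assembly/pom.xml; do
  sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' "$pom"
done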

.github/workflows/nightly_build.yml

Lines changed: 34 additions & 31 deletions
@@ -92,7 +92,7 @@ jobs:
       - name: Build with Maven
         run: |
           ls
-          #spark3.1.3
+          #spark3.4.1
           cp scala/pom.xml scala/pom.xml.origin
           cp scala/common/spark-version/pom.xml scala/common/spark-version/pom.xml.origin
           cp scala/common/spark-version/3.0/pom.xml scala/common/spark-version/3.0/pom.xml.origin
@@ -106,17 +106,17 @@ jobs:
 
           sed -i 's/<artifactId>${spark-version.project}<\/artifactId>/<artifactId>${spark-version.project}-${SPARK_PLATFORM}<\/artifactId>/' scala/dllib/pom.xml
           sed -i 's/<artifactId>3.0<\/artifactId>/<artifactId>3.0-${SPARK_PLATFORM}<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/common/spark-version/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/dllib/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/orca/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/friesian/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/grpc/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/serving/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/ppml/pom.xml
-          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.3<\/artifactId>/' scala/assembly/pom.xml
-          mvn -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} clean deploy -DskipTests -Dspark.version=3.1.3 -DSPARK_PLATFORM=SPARK_3.1 -P spark_3.x -P sign --file scala/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/common/spark-version/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/dllib/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/orca/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/friesian/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/grpc/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/serving/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/ppml/pom.xml
+          sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.4.1<\/artifactId>/' scala/assembly/pom.xml
+          mvn -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} clean deploy -DskipTests -Dspark.version=3.4.1 -DSPARK_PLATFORM=SPARK_3.4 -P spark_3.x -P sign --file scala/pom.xml
 
           mv scala/pom.xml.origin scala/pom.xml
           mv scala/common/spark-version/pom.xml.origin scala/common/spark-version/pom.xml
@@ -198,52 +198,54 @@ jobs:
           export IMAGE=intelanalytics/bigdl
           cd docker/bigdl
           echo "########################################"
-          echo "################# bigdl 3.1.3 #######"
+          echo "################# bigdl 3.4.1 #######"
           echo "########################################"
           docker build \
             --build-arg http_proxy=${HTTP_PROXY} \
             --build-arg https_proxy=${HTTPS_PROXY} \
-            --build-arg SPARK_VERSION=3.1.3 \
+            --build-arg SPARK_VERSION=3.4.1 \
+            --build-arg HADOOP_VERSION=3 \
             --build-arg JDK_VERSION=8u192 \
             --build-arg JDK_URL=${JDK_URL} \
             --build-arg no_proxy=${NO_PROXY} \
-            --rm --no-cache -t $IMAGE-spark-3.1.3:${TAG} .
+            --rm --no-cache -t $IMAGE-spark-3.4.1:${TAG} .
           # tag 'latest'
-          docker push ${IMAGE}-spark-3.1.3:${TAG}
-          docker tag ${IMAGE}-spark-3.1.3:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG}
-          docker push 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG}
+          docker push ${IMAGE}-spark-3.4.1:${TAG}
+          docker tag ${IMAGE}-spark-3.4.1:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG}
+          docker push 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG}
           # tag SNAPSHOT
           export TAG_SNAPSHOT=2.4.0-SNAPSHOT
-          docker tag 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}-spark-3.1.3:${TAG_SNAPSHOT}
-          docker push ${IMAGE}-spark-3.1.3:${TAG_SNAPSHOT}
-          docker rmi -f ${IMAGE}-spark-3.1.3:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}-spark-3.1.3:${TAG_SNAPSHOT}
+          docker tag 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}-spark-3.4.1:${TAG_SNAPSHOT}
+          docker push ${IMAGE}-spark-3.4.1:${TAG_SNAPSHOT}
+          docker rmi -f ${IMAGE}-spark-3.4.1:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}-spark-3.4.1:${TAG_SNAPSHOT}
       - name: docker deploy bigdl-k8s
         run: |
           cd docker/bigdl-k8s
           export IMAGE=intelanalytics/bigdl-k8s
           export TAG=latest
           echo "########################################"
-          echo "################# bigdl-k8s 3.1.3 #######"
+          echo "################# bigdl-k8s 3.4.1 #######"
           echo "########################################"
           docker build \
             --build-arg http_proxy=${HTTP_PROXY} \
             --build-arg https_proxy=${HTTPS_PROXY} \
-            --build-arg SPARK_VERSION=3.1.3 \
+            --build-arg SPARK_VERSION=3.4.1 \
+            --build-arg HADOOP_VERSION=3 \
             --build-arg JDK_VERSION=8u192 \
             --build-arg JDK_URL=${JDK_URL} \
             --build-arg no_proxy=${NO_PROXY} \
-            --rm --no-cache -t ${IMAGE}-spark-3.1.3:${TAG} .
+            --rm --no-cache -t ${IMAGE}-spark-3.4.1:${TAG} .
           # tag 'latest'
-          docker push ${IMAGE}-spark-3.1.3:${TAG}
-          docker tag ${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}:${TAG}
+          docker push ${IMAGE}-spark-3.4.1:${TAG}
+          docker tag ${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}:${TAG}
           docker push ${IMAGE}:${TAG}
-          docker tag ${IMAGE}:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG}
-          docker push 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG}
+          docker tag ${IMAGE}:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG}
+          docker push 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG}
           # tag SNAPSHOT
           export TAG_SNAPSHOT=2.4.0-SNAPSHOT
-          docker tag 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}:${TAG_SNAPSHOT}
+          docker tag 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}:${TAG_SNAPSHOT}
           docker push ${IMAGE}:${TAG_SNAPSHOT}
-          docker rmi -f ${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.1.3:${TAG} ${IMAGE}:${TAG_SNAPSHOT}
+          docker rmi -f ${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}:${TAG} 10.239.45.10/arda/${IMAGE}-spark-3.4.1:${TAG} ${IMAGE}:${TAG_SNAPSHOT}
       - name: docker deploy bigdl-chronos
         run: |
           mv docker/chronos-nightly/Dockerfile ./Dockerfile
@@ -280,7 +282,8 @@ jobs:
             --build-arg JDK_VERSION=8u192 \
             --build-arg JDK_URL=${JDK_URL} \
             --build-arg no_proxy=${NO_PROXY} \
-            --build-arg SPARK_VERSION=3.1.3 \
+            --build-arg SPARK_VERSION=3.4.1 \
+            --build-arg HADOOP_VERSION=3 \
             --build-arg PY4J_VERSION=0.10.9 \
             --rm --no-cache -t ${IMAGE}:${TAG} .
           # tag 'latest'
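
Every image in this job runs the same build, tag, push, clean-up cycle; only the image name and tag set change. A minimal parameterized sketch of one pass, run from the matching docker/ subdirectory (image name and the 10.239.45.10/arda registry prefix come from the workflow above; proxy and JDK build args are omitted for brevity):

# One pass of the nightly cycle, with the Spark version held in a single
# variable so the next version bump touches one line.
SPARK_VERSION=3.4.1
IMAGE=intelanalytics/bigdl
TAG=latest
docker build \
    --build-arg SPARK_VERSION=${SPARK_VERSION} \
    --build-arg HADOOP_VERSION=3 \
    --rm --no-cache -t ${IMAGE}-spark-${SPARK_VERSION}:${TAG} .
docker push ${IMAGE}-spark-${SPARK_VERSION}:${TAG}
docker tag ${IMAGE}-spark-${SPARK_VERSION}:${TAG} 10.239.45.10/arda/${IMAGE}-spark-${SPARK_VERSION}:${TAG}
docker push 10.239.45.10/arda/${IMAGE}-spark-${SPARK_VERSION}:${TAG}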

.github/workflows/sdl_snyk_scala.yml

Lines changed: 2 additions & 2 deletions
@@ -31,14 +31,14 @@ jobs:
           cd scala
           snyk monitor --all-projects
           cd -
-      - name: "run Snyk test scan Spark 3.1.3"
+      - name: "run Snyk test scan Spark 3.4.1"
         env:
           no_proxy: snyk.devtools.intel.com, intel.com
         run: |
           sed -i 's/<scala.major.version>2.11<\/scala.major.version>/<scala.major.version>2.12<\/scala.major.version>/' scala/pom.xml
           sed -i 's/<scala.version>2.11.12<\/scala.version>/<scala.version>2.12.10<\/scala.version>/' scala/pom.xml
           sed -i 's/<spark-version.project>2.0<\/spark-version.project>/<spark-version.project>3.0<\/spark-version.project>/' scala/pom.xml
-          sed -i 's/<spark.version>2.4.6<\/spark.version>/<spark.version>3.1.3<\/spark.version>/' scala/pom.xml
+          sed -i 's/<spark.version>2.4.6<\/spark.version>/<spark.version>3.4.1<\/spark.version>/' scala/pom.xml
           cd scala
           snyk monitor --all-projects
           cd -
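
Because the scan step edits the pom in place before scanning, a sanity check is cheap. A sketch, assuming it runs from the repository root after the sed rewrites above, to confirm the pom really points at Spark 3.4.1 before snyk monitor is invoked:

# Verify the in-place pom edit landed; failure means the sed pattern no longer matches.
grep -q '<spark.version>3.4.1</spark.version>' scala/pom.xml \
  && echo "scala/pom.xml pinned to Spark 3.4.1" \
  || { echo "spark.version substitution did not take effect" >&2; exit 1; }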

docker/bigdl-k8s/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ RUN apt-get update --fix-missing && \
 
 ADD ./log4j2.xml ${SPARK_HOME}/conf/log4j2.xml
 RUN ln -fs /bin/bash /bin/sh
-RUN if [ $SPARK_VERSION = "3.1.3" ]; then \
+RUN if [ $SPARK_VERSION = "3.4.1" ]; then \
         rm $SPARK_HOME/jars/okhttp-*.jar && \
         wget -P $SPARK_HOME/jars https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.8.0/okhttp-3.8.0.jar; \
     elif [ $SPARK_VERSION = "2.4.6" ]; then \
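
The SPARK_VERSION build arg selects which branch of this RUN step executes. A hypothetical local build (the image tag is invented for illustration; JDK_URL and proxy args from the workflow are omitted, so a real build would also need those) that exercises the new 3.4.1 branch:

# Build bigdl-k8s locally; SPARK_VERSION=3.4.1 triggers the okhttp-3.8.0 replacement above.
cd docker/bigdl-k8s
docker build --build-arg SPARK_VERSION=3.4.1 -t bigdl-k8s-local:3.4.1 .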

docker/bigdl/Dockerfile

Lines changed: 6 additions & 5 deletions
@@ -15,6 +15,7 @@
 #
 
 ARG SPARK_VERSION=2.4.6
+ARG HADOOP_VERSION=3
 ARG SPARK_HOME=/opt/spark
 ARG JDK_VERSION=8u192
 ARG JDK_URL=your_jdk_url
@@ -40,10 +41,10 @@ RUN apt-get update --fix-missing && \
     mv /opt/jdk* /opt/jdk$JDK_VERSION && \
     ln -s /opt/jdk$JDK_VERSION /opt/jdk && \
     # spark
-    wget https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
-    tar -zxvf spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
-    mv spark-${SPARK_VERSION}-bin-hadoop2.7 /opt/spark && \
-    rm spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
+    wget https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
+    tar -zxvf spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
+    mv spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION} /opt/spark && \
+    rm spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
     cp /opt/spark/kubernetes/dockerfiles/spark/entrypoint.sh /opt && \
     # remove log4j 1.x jars
     rm -f ${SPARK_HOME}/jars/log4j-1.2.17.jar && \
@@ -58,7 +59,7 @@ RUN apt-get update --fix-missing && \
 ADD ./log4j2.xml ${SPARK_HOME}/conf/log4j2.xml
 ADD ./spark-defaults.conf ${SPARK_HOME}/conf/spark-defaults.conf
 RUN ln -fs /bin/bash /bin/sh
-RUN if [ $SPARK_VERSION = "3.1.3" ]; then \
+RUN if [ $SPARK_VERSION = "3.4.1" ]; then \
         rm $SPARK_HOME/jars/okhttp-*.jar && \
         wget -P $SPARK_HOME/jars https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.8.0/okhttp-3.8.0.jar; \
     elif [ $SPARK_VERSION = "2.4.6" ]; then \
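
The new HADOOP_VERSION arg exists because the Apache archive naming differs across Spark lines: Spark 2.4.6 tarballs end in -bin-hadoop2.7.tgz, while Spark 3.4.1 ships as -bin-hadoop3.tgz. A sketch that checks the constructed URL before committing to a full image build:

# Confirm the Spark/Hadoop combination resolves to a real archive on archive.apache.org.
SPARK_VERSION=3.4.1
HADOOP_VERSION=3   # use 2.7 when building with SPARK_VERSION=2.4.6
URL="https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"
curl -sfIL "$URL" >/dev/null && echo "ok: $URL" || echo "missing: $URL"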

docker/friesian-serving/Dockerfile

Lines changed: 5 additions & 4 deletions
@@ -21,16 +21,17 @@ ARG BIGDL_VERSION=2.4.0-SNAPSHOT
 # stage.1 spark
 FROM ubuntu:20.04 as spark
 ARG SPARK_VERSION=2.4.6
+ARG HADOOP_VERSION=3
 ARG SPARK_HOME=/opt/spark
 
 RUN apt-get update --fix-missing && \
     apt-get install -y --no-install-recommends apt-utils wget && \
     apt-get clean && \
     rm -rf /var/lib/apt/lists/* && \
-    wget --progress=dot:giga --no-check-certificate https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
-    tar -zxvf spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
-    mv spark-${SPARK_VERSION}-bin-hadoop2.7 /opt/spark && \
-    rm spark-${SPARK_VERSION}-bin-hadoop2.7.tgz && \
+    wget --progress=dot:giga --no-check-certificate https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
+    tar -zxvf spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
+    mv spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION} /opt/spark && \
+    rm spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
     # remove log4j 1.x jars
     rm -f ${SPARK_HOME}/jars/log4j-1.2.17.jar && \
     rm -f ${SPARK_HOME}/jars/slf4j-log4j12-1.7.16.jar && \
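
Since the Spark download lives in a named build stage, it can be exercised in isolation. A hypothetical invocation (the stage name spark comes from the FROM ... as spark line above; the invented tag is for illustration) that builds only that stage with the new defaults:

# Build just the spark stage of the multi-stage image, overriding the args.
docker build --target spark \
    --build-arg SPARK_VERSION=3.4.1 \
    --build-arg HADOOP_VERSION=3 \
    -t friesian-spark-stage:test docker/friesian-serving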
